/*
 * Copyright (C) 2008 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "Dalvik.h"
#include "alloc/clz.h"
#include "alloc/HeapBitmap.h"
#include "alloc/HeapInternal.h"
#include "alloc/HeapSource.h"
#include "alloc/MarkSweep.h"
#include <limits.h>     // for ULONG_MAX
#include <sys/mman.h>   // for madvise(), mmap()
#include <cutils/ashmem.h>
#include <errno.h>

#define GC_DEBUG_PARANOID   2
#define GC_DEBUG_BASIC      1
#define GC_DEBUG_OFF        0
#define GC_DEBUG(l)         (GC_DEBUG_LEVEL >= (l))

#if 1
#define GC_DEBUG_LEVEL      GC_DEBUG_PARANOID
#else
#define GC_DEBUG_LEVEL      GC_DEBUG_OFF
#endif

#define VERBOSE_GC          0

#define GC_LOG_TAG      LOG_TAG "-gc"

#if LOG_NDEBUG
#define LOGV_GC(...)    ((void)0)
#define LOGD_GC(...)    ((void)0)
#else
#define LOGV_GC(...)    LOG(LOG_VERBOSE, GC_LOG_TAG, __VA_ARGS__)
#define LOGD_GC(...)    LOG(LOG_DEBUG, GC_LOG_TAG, __VA_ARGS__)
#endif

#if VERBOSE_GC
#define LOGVV_GC(...)   LOGV_GC(__VA_ARGS__)
#else
#define LOGVV_GC(...)   ((void)0)
#endif

#define LOGI_GC(...)    LOG(LOG_INFO, GC_LOG_TAG, __VA_ARGS__)
#define LOGW_GC(...)    LOG(LOG_WARN, GC_LOG_TAG, __VA_ARGS__)
#define LOGE_GC(...)    LOG(LOG_ERROR, GC_LOG_TAG, __VA_ARGS__)

#define LOG_SCAN(...)   LOGV_GC("SCAN: " __VA_ARGS__)
#define LOG_MARK(...)   LOGV_GC("MARK: " __VA_ARGS__)
#define LOG_SWEEP(...)  LOGV_GC("SWEEP: " __VA_ARGS__)
#define LOG_REF(...)    LOGV_GC("REF: " __VA_ARGS__)

#define LOGV_SCAN(...)  LOGVV_GC("SCAN: " __VA_ARGS__)
#define LOGV_MARK(...)  LOGVV_GC("MARK: " __VA_ARGS__)
#define LOGV_SWEEP(...) LOGVV_GC("SWEEP: " __VA_ARGS__)
#define LOGV_REF(...)   LOGVV_GC("REF: " __VA_ARGS__)

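/* Round a size or address up to the next multiple of the system
 * page size.
 */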
#define ALIGN_UP_TO_PAGE_SIZE(p) \
    (((size_t)(p) + (SYSTEM_PAGE_SIZE - 1)) & ~(SYSTEM_PAGE_SIZE - 1))

/* Do not cast the result of this to a boolean; the only set bit
 * may be > 1<<8.
 */
static inline long isMarked(const DvmHeapChunk *hc, const GcMarkContext *ctx)
        __attribute__((always_inline));
static inline long isMarked(const DvmHeapChunk *hc, const GcMarkContext *ctx)
{
    return dvmHeapBitmapIsObjectBitSetInList(ctx->bitmaps, ctx->numBitmaps, hc);
}

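/* Create the mark stack used while recursively marking reachable
 * objects.  The stack is backed by an ashmem region sized for the
 * worst case; returns false if the region or its mapping can't be
 * created.
 */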
static bool
createMarkStack(GcMarkStack *stack)
{
    const Object **limit;
    size_t size;
    int fd, err;

    /* Create a stack big enough for the worst possible case,
     * where the heap is perfectly full of the smallest object.
     * TODO: be better about memory usage; use a smaller stack with
     *       overflow detection and recovery.
     */
    size = dvmHeapSourceGetIdealFootprint() * sizeof(Object*) /
            (sizeof(Object) + HEAP_SOURCE_CHUNK_OVERHEAD);
    size = ALIGN_UP_TO_PAGE_SIZE(size);
    fd = ashmem_create_region("dalvik-heap-markstack", size);
    if (fd < 0) {
        LOGE_GC("Could not create %zd-byte ashmem mark stack: %s\n",
            size, strerror(errno));
        return false;
    }
    limit = (const Object **)mmap(NULL, size, PROT_READ | PROT_WRITE,
            MAP_PRIVATE, fd, 0);
    err = errno;
    close(fd);
    if (limit == MAP_FAILED) {
        LOGE_GC("Could not mmap %zd-byte ashmem mark stack: %s\n",
            size, strerror(err));
        return false;
    }

    memset(stack, 0, sizeof(*stack));
    stack->limit = limit;
    stack->base = (const Object **)((uintptr_t)limit + size);
    stack->top = stack->base;

    return true;
}

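/* Release the mark stack's backing mapping and clear its bookkeeping.
 */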
static void
destroyMarkStack(GcMarkStack *stack)
{
    munmap((char *)stack->limit,
            (uintptr_t)stack->base - (uintptr_t)stack->limit);
    memset(stack, 0, sizeof(*stack));
}

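/* Push an object onto the mark stack.  The stack grows downward from
 * base toward limit; there is no overflow check (see the sizing
 * comment in createMarkStack()).
 */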
#define MARK_STACK_PUSH(stack, obj) \
    do { \
        *--(stack).top = (obj); \
    } while (false)

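/* Set up the mark state for a collection: create the mark stack and
 * a set of mark bitmaps mirroring the current object bitmaps.
 * Returns false if any allocation fails.
 */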
bool
dvmHeapBeginMarkStep()
{
    GcMarkContext *mc = &gDvm.gcHeap->markContext;
    HeapBitmap objectBitmaps[HEAP_SOURCE_MAX_HEAP_COUNT];
    size_t numBitmaps;

    if (!createMarkStack(&mc->stack)) {
        return false;
    }

    numBitmaps = dvmHeapSourceGetObjectBitmaps(objectBitmaps,
            HEAP_SOURCE_MAX_HEAP_COUNT);
    if (numBitmaps == 0) {
        return false;
    }

    /* Create mark bitmaps that cover the same ranges as the
     * current object bitmaps.
     */
    if (!dvmHeapBitmapInitListFromTemplates(mc->bitmaps, objectBitmaps,
            numBitmaps, "mark"))
    {
        return false;
    }

    mc->numBitmaps = numBitmaps;
    mc->finger = NULL;

    return true;
}

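/* Set the mark bit for the given chunk and return its previous
 * value; zero means the chunk had not been marked before this call.
 */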
static long setAndReturnMarkBit(GcMarkContext *ctx, const DvmHeapChunk *hc)
        __attribute__((always_inline));
static long
setAndReturnMarkBit(GcMarkContext *ctx, const DvmHeapChunk *hc)
{
    return dvmHeapBitmapSetAndReturnObjectBitInList(ctx->bitmaps,
        ctx->numBitmaps, hc);
}

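/* Common marking helper.  If obj was not already marked, it is pushed
 * on the mark stack when forceStack is set, or when checkFinger is set
 * and obj lies below the current finger (i.e. the bitmap walk has
 * already passed its address).
 */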
static void _markObjectNonNullCommon(const Object *obj, GcMarkContext *ctx,
        bool checkFinger, bool forceStack)
        __attribute__((always_inline));
static void
_markObjectNonNullCommon(const Object *obj, GcMarkContext *ctx,
        bool checkFinger, bool forceStack)
{
    DvmHeapChunk *hc;

    assert(obj != NULL);

#if GC_DEBUG(GC_DEBUG_PARANOID)
//TODO: make sure we're locked
    assert(obj != (Object *)gDvm.unlinkedJavaLangClass);
    assert(dvmIsValidObject(obj));
#endif

    hc = ptr2chunk(obj);
    if (!setAndReturnMarkBit(ctx, hc)) {
        /* This object was not previously marked.
         */
        if (forceStack || (checkFinger && (void *)hc < ctx->finger)) {
            /* This object will need to go on the mark stack.
             */
            MARK_STACK_PUSH(ctx->stack, obj);
        }

#if WITH_HPROF
        if (gDvm.gcHeap->hprofContext != NULL) {
            hprofMarkRootObject(gDvm.gcHeap->hprofContext, obj, 0);
        }
#endif
#if DVM_TRACK_HEAP_MARKING
        gDvm.gcHeap->markCount++;
        gDvm.gcHeap->markSize += dvmHeapSourceChunkSize((void *)hc) +
                HEAP_SOURCE_CHUNK_OVERHEAD;
#endif

        /* obj->clazz can be NULL if we catch an object between
         * dvmMalloc() and DVM_OBJECT_INIT().  This is ok.
         */
        LOGV_MARK("0x%08x %s\n", (uint)obj,
                obj->clazz == NULL ? "<null class>" : obj->clazz->name);
    }
}

/* Used to mark objects when recursing.  Recursion is done by moving
 * the finger across the bitmaps in address order and marking child
 * objects.  Any newly-marked objects whose addresses are lower than
 * the finger won't be visited by the bitmap scan, so those objects
 * need to be added to the mark stack.
 */
static void
markObjectNonNull(const Object *obj, GcMarkContext *ctx)
{
    _markObjectNonNullCommon(obj, ctx, true, false);
}

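/* NULL-tolerant wrapper around markObjectNonNull(), for references
 * that may legitimately be NULL.
 */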
#define markObject(obj, ctx) \
    do { \
        Object *MO_obj_ = (Object *)(obj); \
        if (MO_obj_ != NULL) { \
            markObjectNonNull(MO_obj_, (ctx)); \
        } \
    } while (false)

/* If the object hasn't already been marked, mark it and
 * schedule it to be scanned for references.
 *
 * obj may not be NULL.  The macro dvmMarkObject() should
 * be used in situations where a reference may be NULL.
 *
 * This function may only be called when marking the root
 * set.  When recursing, use the internal markObject[NonNull]().
 */
void
dvmMarkObjectNonNull(const Object *obj)
{
    _markObjectNonNullCommon(obj, &gDvm.gcHeap->markContext, false, false);
}

/* Mark the set of root objects.
 *
 * Things we need to scan:
 * - System classes defined by root classloader
 * - For each thread:
 *   - Interpreted stack, from top to "curFrame"
 *     - Dalvik registers (args + local vars)
 *   - JNI local references
 *   - Automatic VM local references (TrackedAlloc)
 *   - Associated Thread/VMThread object
 *   - ThreadGroups (could track & start with these instead of working
 *     upward from Threads)
 *   - Exception currently being thrown, if present
 * - JNI global references
 * - Interned string table
 * - Primitive classes
 * - Special objects
 *   - gDvm.outOfMemoryObj
 * - Objects allocated with ALLOC_NO_GC
 * - Objects pending finalization (but not yet finalized)
 * - Objects in debugger object registry
 *
 * Don't need:
 * - Native stack (for in-progress stuff in the VM)
 *   - The TrackedAlloc stuff watches all native VM references.
 */
void dvmHeapMarkRootSet()
{
    HeapRefTable *refs;
    GcHeap *gcHeap;
    Object **op;

    gcHeap = gDvm.gcHeap;

    HPROF_SET_GC_SCAN_STATE(HPROF_ROOT_STICKY_CLASS, 0);

    LOG_SCAN("root class loader\n");
    dvmGcScanRootClassLoader();
    LOG_SCAN("primitive classes\n");
    dvmGcScanPrimitiveClasses();

    /* dvmGcScanRootThreadGroups() sets a bunch of
     * different scan states internally.
     */
    HPROF_CLEAR_GC_SCAN_STATE();

    LOG_SCAN("root thread groups\n");
    dvmGcScanRootThreadGroups();

    HPROF_SET_GC_SCAN_STATE(HPROF_ROOT_INTERNED_STRING, 0);

    LOG_SCAN("interned strings\n");
    dvmGcScanInternedStrings();

    HPROF_SET_GC_SCAN_STATE(HPROF_ROOT_JNI_GLOBAL, 0);

    LOG_SCAN("JNI global refs\n");
    dvmGcMarkJniGlobalRefs();

    HPROF_SET_GC_SCAN_STATE(HPROF_ROOT_REFERENCE_CLEANUP, 0);

    LOG_SCAN("pending reference operations\n");
    dvmHeapMarkLargeTableRefs(gcHeap->referenceOperations, true);

    HPROF_SET_GC_SCAN_STATE(HPROF_ROOT_FINALIZING, 0);

    LOG_SCAN("pending finalizations\n");
    dvmHeapMarkLargeTableRefs(gcHeap->pendingFinalizationRefs, false);

    HPROF_SET_GC_SCAN_STATE(HPROF_ROOT_DEBUGGER, 0);

    LOG_SCAN("debugger refs\n");
    dvmGcMarkDebuggerRefs();

    HPROF_SET_GC_SCAN_STATE(HPROF_ROOT_VM_INTERNAL, 0);

    /* Mark all ALLOC_NO_GC objects.
     */
    LOG_SCAN("ALLOC_NO_GC objects\n");
    refs = &gcHeap->nonCollectableRefs;
    op = refs->table;
    while ((uintptr_t)op < (uintptr_t)refs->nextEntry) {
        dvmMarkObjectNonNull(*(op++));
    }

    /* Mark any special objects we have sitting around.
     */
    LOG_SCAN("special objects\n");
    dvmMarkObjectNonNull(gDvm.outOfMemoryObj);
    dvmMarkObjectNonNull(gDvm.internalErrorObj);
    dvmMarkObjectNonNull(gDvm.noClassDefFoundErrorObj);
    dvmMarkObject(gDvm.jniWeakGlobalRefQueue);
//TODO: scan object references sitting in gDvm;  use pointer begin & end

    HPROF_CLEAR_GC_SCAN_STATE();
}

/*
 * Nothing past this point is allowed to use dvmMarkObject*().
 * Scanning/recursion must use markObject*(), which takes the
 * finger into account.
 */
#define dvmMarkObjectNonNull __dont_use_dvmMarkObjectNonNull__


/* Mark all of a ClassObject's interfaces.
 */
static void markInterfaces(const ClassObject *clazz, GcMarkContext *ctx)
{
    ClassObject **interfaces;
    int interfaceCount;
    int i;

    /* Mark all interfaces.
     */
    interfaces = clazz->interfaces;
    interfaceCount = clazz->interfaceCount;
    for (i = 0; i < interfaceCount; i++) {
        markObjectNonNull((Object *)*interfaces, ctx);
        interfaces++;
    }
}

/* Mark all objects referred to by a ClassObject's static fields.
 */
static void scanStaticFields(const ClassObject *clazz, GcMarkContext *ctx)
{
    StaticField *f;
    int i;

    //TODO: Optimize this with a bit vector or something
    f = clazz->sfields;
    for (i = 0; i < clazz->sfieldCount; i++) {
        char c = f->field.signature[0];
        if (c == '[' || c == 'L') {
            /* It's an array or class reference.
             */
            markObject((Object *)f->value.l, ctx);
        }
        f++;
    }
}

/* Mark all objects referred to by a DataObject's instance fields.
 */
static void scanInstanceFields(const DataObject *obj, ClassObject *clazz,
        GcMarkContext *ctx)
{
    if (clazz->refOffsets != CLASS_WALK_SUPER) {
        unsigned int refOffsets = clazz->refOffsets;
        while (refOffsets != 0) {
            const int rshift = CLZ(refOffsets);
            refOffsets &= ~(CLASS_HIGH_BIT >> rshift);
            markObject(dvmGetFieldObject((Object*)obj,
                                         CLASS_OFFSET_FROM_CLZ(rshift)), ctx);
        }
    } else {
        while (clazz != NULL) {
            InstField *f;
            int i;

            /* All of the fields that contain object references
             * are guaranteed to be at the beginning of the ifields list.
             */
            f = clazz->ifields;
            for (i = 0; i < clazz->ifieldRefCount; i++) {
                /* Mark the array or object reference.
                 * May be NULL.
                 *
                 * Note that, per the comment on struct InstField,
                 * f->byteOffset is the offset from the beginning of
                 * obj, not the offset into obj->instanceData.
                 */
                markObject(dvmGetFieldObject((Object*)obj, f->byteOffset), ctx);
                f++;
            }

            /* This will be NULL when we hit java.lang.Object
             */
            clazz = clazz->super;
        }
    }
}

/* Mark all objects referred to by the array's contents.
 */
static void scanObjectArray(const ArrayObject *array, GcMarkContext *ctx)
{
    Object **contents;
    u4 length;
    u4 i;

    contents = (Object **)array->contents;
    length = array->length;

    for (i = 0; i < length; i++) {
        markObject(*contents, ctx); // may be NULL
        contents++;
    }
}

/* Mark all objects referred to by the ClassObject.
 */
static void scanClassObject(const ClassObject *clazz, GcMarkContext *ctx)
{
    LOGV_SCAN("---------> %s\n", clazz->name);

    if (IS_CLASS_FLAG_SET(clazz, CLASS_ISARRAY)) {
        /* We're an array; mark the class object of the contents
         * of the array.
         *
         * Note that we won't necessarily reach the array's element
         * class by scanning the array contents;  the array may be
         * zero-length, or may only contain null objects.
         */
        markObjectNonNull((Object *)clazz->elementClass, ctx);
    }

    /* We scan these explicitly in case the only remaining
     * reference to a particular class object is via a data
     * object;  we may not be guaranteed to reach all
     * live class objects via a classloader.
     */
    markObject((Object *)clazz->super, ctx);  // may be NULL (java.lang.Object)
    markObject(clazz->classLoader, ctx);      // may be NULL

    scanStaticFields(clazz, ctx);
    markInterfaces(clazz, ctx);
}

/* Mark all objects that obj refers to.
 *
 * Called on every object in markList.
 */
static void scanObject(const Object *obj, GcMarkContext *ctx)
{
    ClassObject *clazz;

    assert(dvmIsValidObject(obj));
    LOGV_SCAN("0x%08x %s\n", (uint)obj, obj->clazz->name);

#if WITH_HPROF
    if (gDvm.gcHeap->hprofContext != NULL) {
        hprofDumpHeapObject(gDvm.gcHeap->hprofContext, obj);
    }
#endif

    /* Get and mark the class object for this particular instance.
     */
    clazz = obj->clazz;
    if (clazz == NULL) {
        /* This can happen if we catch an object between
         * dvmMalloc() and DVM_OBJECT_INIT().  The object
         * won't contain any references yet, so we can
         * just skip it.
         */
        return;
    } else if (clazz == gDvm.unlinkedJavaLangClass) {
        /* This class hasn't been linked yet.  We're guaranteed
         * that the object doesn't contain any references that
         * aren't already tracked, so we can skip scanning it.
         *
         * NOTE: unlinkedJavaLangClass is not on the heap, so
         * it's very important that we don't try marking it.
         */
        return;
    }

    assert(dvmIsValidObject((Object *)clazz));
    markObjectNonNull((Object *)clazz, ctx);

    /* Mark any references in this object.
     */
    if (IS_CLASS_FLAG_SET(clazz, CLASS_ISARRAY)) {
        /* It's an array object.
         */
        if (IS_CLASS_FLAG_SET(clazz, CLASS_ISOBJECTARRAY)) {
            /* It's an array of object references.
             */
            scanObjectArray((ArrayObject *)obj, ctx);
        }
        // else there's nothing else to scan
    } else {
        /* It's a DataObject-compatible object.
         */
        scanInstanceFields((DataObject *)obj, clazz, ctx);

        if (IS_CLASS_FLAG_SET(clazz, CLASS_ISREFERENCE)) {
            GcHeap *gcHeap = gDvm.gcHeap;
            Object *referent;

            /* It's a subclass of java/lang/ref/Reference.
             * The fields in this class have been arranged
             * such that scanInstanceFields() did not actually
             * mark the "referent" field;  we need to handle
             * it specially.
             *
             * If the referent already has a strong mark (isMarked(referent)),
             * we don't care about its reference status.
             */
            referent = dvmGetFieldObject(obj,
                    gDvm.offJavaLangRefReference_referent);
            if (referent != NULL &&
                    !isMarked(ptr2chunk(referent), &gcHeap->markContext))
            {
                u4 refFlags;

                if (gcHeap->markAllReferents) {
                    LOG_REF("Hard-marking a reference\n");

                    /* Don't bother with normal reference-following
                     * behavior, just mark the referent.  This should
                     * only be used when following objects that just
                     * became scheduled for finalization.
                     */
                    markObjectNonNull(referent, ctx);
                    goto skip_reference;
                }

                /* See if this reference was handled by a previous GC.
                 */
                if (dvmGetFieldObject(obj,
                            gDvm.offJavaLangRefReference_vmData) ==
                        SCHEDULED_REFERENCE_MAGIC)
                {
                    LOG_REF("Skipping scheduled reference\n");

                    /* Don't reschedule it, but make sure that its
                     * referent doesn't get collected (in case it's
                     * a PhantomReference and wasn't cleared automatically).
                     */
                    //TODO: Mark these after handling all new refs of
                    //      this strength, in case the new refs refer
                    //      to the same referent.  Not a very common
                    //      case, though.
                    markObjectNonNull(referent, ctx);
                    goto skip_reference;
                }

                /* Find out what kind of reference is pointing
                 * to referent.
                 */
                refFlags = GET_CLASS_FLAG_GROUP(clazz,
                    CLASS_ISREFERENCE |
                    CLASS_ISWEAKREFERENCE |
                    CLASS_ISPHANTOMREFERENCE);

            /* We use the vmData field of Reference objects
             * as a next pointer in a singly-linked list.
             * That way, we don't need to allocate any memory
             * while we're doing a GC.
             */
#define ADD_REF_TO_LIST(list, ref) \
            do { \
                Object *ARTL_ref_ = (/*de-const*/Object *)(ref); \
                dvmSetFieldObject(ARTL_ref_, \
                        gDvm.offJavaLangRefReference_vmData, list); \
                list = ARTL_ref_; \
            } while (false)

                /* At this stage, we just keep track of all of
                 * the live references that we've seen.  Later,
                 * we'll walk through each of these lists and
                 * deal with the referents.
                 */
                if (refFlags == CLASS_ISREFERENCE) {
                    /* It's a soft reference.  Depending on the state,
                     * we'll attempt to collect all of them, some of
                     * them, or none of them.
                     */
                    if (gcHeap->softReferenceCollectionState ==
                            SR_COLLECT_NONE)
                    {
                sr_collect_none:
                        markObjectNonNull(referent, ctx);
                    } else if (gcHeap->softReferenceCollectionState ==
                            SR_COLLECT_ALL)
                    {
                sr_collect_all:
                        ADD_REF_TO_LIST(gcHeap->softReferences, obj);
                    } else {
                        /* We'll only try to collect half of the
                         * referents.
                         */
                        if (gcHeap->softReferenceColor++ & 1) {
                            goto sr_collect_none;
                        }
                        goto sr_collect_all;
                    }
                } else {
                    /* It's a weak or phantom reference.
                     * Clearing CLASS_ISREFERENCE will reveal which.
                     */
                    refFlags &= ~CLASS_ISREFERENCE;
                    if (refFlags == CLASS_ISWEAKREFERENCE) {
                        ADD_REF_TO_LIST(gcHeap->weakReferences, obj);
                    } else if (refFlags == CLASS_ISPHANTOMREFERENCE) {
                        ADD_REF_TO_LIST(gcHeap->phantomReferences, obj);
                    } else {
                        assert(!"Unknown reference type");
                    }
                }
#undef ADD_REF_TO_LIST
            }
        }

    skip_reference:
        /* If this is a class object, mark various other things that
         * its internals point to.
         *
         * All class objects are instances of java.lang.Class,
         * including the java.lang.Class class object.
         */
        if (clazz == gDvm.classJavaLangClass) {
            scanClassObject((ClassObject *)obj, ctx);
        }
    }
}

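/* Scan objects from the mark stack until it drains.  The finger is
 * pushed past the end of the address range so that anything marked
 * from here on is put back on the stack rather than left for a
 * bitmap walk.
 */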
static void
processMarkStack(GcMarkContext *ctx)
{
    const Object **const base = ctx->stack.base;

    /* Scan anything that's on the mark stack.
     * We can't use the bitmaps anymore, so use
     * a finger that points past the end of them.
     */
    ctx->finger = (void *)ULONG_MAX;
    while (ctx->stack.top != base) {
        scanObject(*ctx->stack.top++, ctx);
    }
}

#ifndef NDEBUG
static uintptr_t gLastFinger = 0;
#endif

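/* Callback for dvmHeapBitmapWalkList(): advance the finger, then scan
 * each chunk in the batch as an object.
 */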
static bool
scanBitmapCallback(size_t numPtrs, void **ptrs, const void *finger, void *arg)
{
    GcMarkContext *ctx = (GcMarkContext *)arg;
    size_t i;

#ifndef NDEBUG
    assert((uintptr_t)finger >= gLastFinger);
    gLastFinger = (uintptr_t)finger;
#endif

    ctx->finger = finger;
    for (i = 0; i < numPtrs; i++) {
        /* The pointers we're getting back are DvmHeapChunks,
         * not Objects.
         */
        scanObject(chunk2ptr(*ptrs++), ctx);
    }

    return true;
}

/* Given bitmaps with the root set marked, find and mark all
 * reachable objects.  When this returns, the entire set of
 * live objects will be marked and the mark stack will be empty.
 */
void dvmHeapScanMarkedObjects()
{
    GcMarkContext *ctx = &gDvm.gcHeap->markContext;

    assert(ctx->finger == NULL);

    /* The bitmaps currently have bits set for the root set.
     * Walk across the bitmaps and scan each object.
     */
#ifndef NDEBUG
    gLastFinger = 0;
#endif
    dvmHeapBitmapWalkList(ctx->bitmaps, ctx->numBitmaps,
            scanBitmapCallback, ctx);

    /* We've walked the mark bitmaps.  Scan anything that's
     * left on the mark stack.
     */
    processMarkStack(ctx);

    LOG_SCAN("done with marked objects\n");
}

/** Clear the referent field.
 */
static void clearReference(Object *reference)
{
    /* This is what the default implementation of Reference.clear()
     * does.  We're required to clear all references to a given
     * referent atomically, so we can't pop in and out of interp
     * code each time.
     *
     * We don't ever actually call overriding implementations of
     * Reference.clear().
     */
    dvmSetFieldObject(reference,
            gDvm.offJavaLangRefReference_referent, NULL);
}

/** @return true if we need to schedule a call to enqueue().
 */
static bool enqueueReference(Object *reference)
{
    Object *queue = dvmGetFieldObject(reference,
            gDvm.offJavaLangRefReference_queue);
    Object *queueNext = dvmGetFieldObject(reference,
            gDvm.offJavaLangRefReference_queueNext);
    if (queue == NULL || queueNext != NULL) {
        /* There is no queue, or the reference has already
         * been enqueued.  The Reference.enqueue() method
         * will do nothing even if we call it.
         */
        return false;
    }

    /* We need to call enqueue(), but if we called it from
     * here we'd probably deadlock.  Schedule a call.
     */
    return true;
}

/* All objects for stronger reference levels have been
 * marked before this is called.
 */
void dvmHeapHandleReferences(Object *refListHead, enum RefType refType)
{
    Object *reference;
    GcMarkContext *markContext = &gDvm.gcHeap->markContext;
    const int offVmData = gDvm.offJavaLangRefReference_vmData;
    const int offReferent = gDvm.offJavaLangRefReference_referent;
    bool workRequired = false;

    reference = refListHead;
    while (reference != NULL) {
        Object *next;
        Object *referent;

        /* Pull the interesting fields out of the Reference object.
         */
        next = dvmGetFieldObject(reference, offVmData);
        referent = dvmGetFieldObject(reference, offReferent);

        //TODO: when handling REF_PHANTOM, unlink any references
        //      that fail this initial if().  We need to re-walk
        //      the list, and it would be nice to avoid the extra
        //      work.
        if (referent != NULL && !isMarked(ptr2chunk(referent), markContext)) {
            bool schedEnqueue;

            /* This is the strongest reference that refers to referent.
             * Do the right thing.
             */
            switch (refType) {
            case REF_SOFT:
            case REF_WEAK:
                clearReference(reference);
                schedEnqueue = enqueueReference(reference);
                break;
            case REF_PHANTOM:
                /* PhantomReferences are not cleared automatically.
                 * Until someone clears it (or the reference itself
                 * is collected), the referent must remain alive.
                 *
                 * It's necessary to fully mark the referent because
                 * it will still be present during the next GC, and
                 * all objects that it points to must be valid.
                 * (The referent will be marked outside of this loop,
                 * after handling all references of this strength, in
                 * case multiple references point to the same object.)
                 *
                 * One exception: JNI "weak global" references are handled
                 * as a special case.  They're identified by the queue.
                 */
                if (gDvm.jniWeakGlobalRefQueue != NULL) {
                    Object* queue = dvmGetFieldObject(reference,
                            gDvm.offJavaLangRefReference_queue);
                    if (queue == gDvm.jniWeakGlobalRefQueue) {
                        LOGV("+++ WGR: clearing + not queueing %p:%p\n",
                            reference, referent);
                        clearReference(reference);
                        schedEnqueue = false;
                        break;
                    }
                }

                /* A PhantomReference is only useful with a
                 * queue, but since it's possible to create one
                 * without a queue, we need to check.
                 */
                schedEnqueue = enqueueReference(reference);
                break;
            default:
                assert(!"Bad reference type");
                schedEnqueue = false;
                break;
            }

            if (schedEnqueue) {
                /* Stuff the enqueue bit in the bottom of the pointer.
                 * Assumes that objects are 8-byte aligned.
                 *
                 * Note that we are adding the *Reference* (which
                 * is by definition already marked at this point) to
                 * this list; we're not adding the referent (which
                 * has already been cleared).
                 */
                assert(((intptr_t)reference & 3) == 0);
                assert((WORKER_ENQUEUE & ~3) == 0);
                if (!dvmHeapAddRefToLargeTable(
                        &gDvm.gcHeap->referenceOperations,
                        (Object *)((uintptr_t)reference | WORKER_ENQUEUE)))
                {
                    LOGE_HEAP("dvmHeapHandleReferences(): no room for any more "
                            "reference operations\n");
                    dvmAbort();
                }
                workRequired = true;
            }

            if (refType != REF_PHANTOM) {
                /* Let later GCs know not to reschedule this reference.
                 */
                dvmSetFieldObject(reference, offVmData,
                        SCHEDULED_REFERENCE_MAGIC);
            } // else this is handled later for REF_PHANTOM

        } // else there was a stronger reference to the referent.

        reference = next;
    }

    /* Walk through the reference list again, and mark any non-clear/marked
     * referents.  Only PhantomReferences can have non-clear referents
     * at this point.
     *
     * (Could skip this for JNI weak globals, since we know they've been
     * cleared.)
     */
    if (refType == REF_PHANTOM) {
        bool scanRequired = false;

        HPROF_SET_GC_SCAN_STATE(HPROF_ROOT_REFERENCE_CLEANUP, 0);
        reference = refListHead;
        while (reference != NULL) {
            Object *next;
            Object *referent;

            /* Pull the interesting fields out of the Reference object.
             */
            next = dvmGetFieldObject(reference, offVmData);
            referent = dvmGetFieldObject(reference, offReferent);

            if (referent != NULL && !isMarked(ptr2chunk(referent), markContext)) {
                markObjectNonNull(referent, markContext);
                scanRequired = true;

                /* Let later GCs know not to reschedule this reference.
                 */
                dvmSetFieldObject(reference, offVmData,
                        SCHEDULED_REFERENCE_MAGIC);
            }

            reference = next;
        }
        HPROF_CLEAR_GC_SCAN_STATE();

        if (scanRequired) {
            processMarkStack(markContext);
        }
    }

    if (workRequired) {
        dvmSignalHeapWorker(false);
    }
}


/* Find unreachable objects that need to be finalized,
 * and schedule them for finalization.
 */
void dvmHeapScheduleFinalizations()
{
    HeapRefTable newPendingRefs;
    LargeHeapRefTable *finRefs = gDvm.gcHeap->finalizableRefs;
    Object **ref;
    Object **lastRef;
    size_t totalPendCount;
    GcMarkContext *markContext = &gDvm.gcHeap->markContext;

    /*
     * All reachable objects have been marked.
     * Any unmarked finalizable objects need to be finalized.
     */

    /* Create a table that the new pending refs will
     * be added to.
     */
    if (!dvmHeapInitHeapRefTable(&newPendingRefs, 128)) {
        //TODO: mark all finalizable refs and hope that
        //      we can schedule them next time.  Watch out,
        //      because we may be expecting to free up space
        //      by calling finalizers.
        LOGE_GC("dvmHeapScheduleFinalizations(): no room for "
                "pending finalizations\n");
        dvmAbort();
    }

    /* Walk through finalizableRefs and move any unmarked references
     * to the list of new pending refs.
     */
    totalPendCount = 0;
    while (finRefs != NULL) {
        Object **gapRef;
        size_t newPendCount = 0;

        gapRef = ref = finRefs->refs.table;
        lastRef = finRefs->refs.nextEntry;
        while (ref < lastRef) {
            DvmHeapChunk *hc;

            hc = ptr2chunk(*ref);
            if (!isMarked(hc, markContext)) {
                if (!dvmHeapAddToHeapRefTable(&newPendingRefs, *ref)) {
                    //TODO: add the current table and allocate
                    //      a new, smaller one.
                    LOGE_GC("dvmHeapScheduleFinalizations(): "
                            "no room for any more pending finalizations: %zd\n",
                            dvmHeapNumHeapRefTableEntries(&newPendingRefs));
                    dvmAbort();
                }
                newPendCount++;
            } else {
                /* This ref is marked, so will remain on finalizableRefs.
                 */
                if (newPendCount > 0) {
                    /* Copy it up to fill the holes.
                     */
                    *gapRef++ = *ref;
                } else {
                    /* No holes yet; don't bother copying.
                     */
                    gapRef++;
                }
            }
            ref++;
        }
        finRefs->refs.nextEntry = gapRef;
        //TODO: if the table is empty when we're done, free it.
        totalPendCount += newPendCount;
        finRefs = finRefs->next;
    }
    LOGD_GC("dvmHeapScheduleFinalizations(): %zd finalizers triggered.\n",
            totalPendCount);
    if (totalPendCount == 0) {
        /* No objects required finalization.
         * Free the empty temporary table.
         */
        dvmHeapFreeHeapRefTable(&newPendingRefs);
        return;
    }

    /* Add the new pending refs to the main list.
     */
    if (!dvmHeapAddTableToLargeTable(&gDvm.gcHeap->pendingFinalizationRefs,
                &newPendingRefs))
    {
        LOGE_GC("dvmHeapScheduleFinalizations(): can't insert new "
                "pending finalizations\n");
        dvmAbort();
    }

    //TODO: try compacting the main list with a memcpy loop

    /* Mark the refs we just moved;  we don't want them or their
     * children to get swept yet.
     */
    ref = newPendingRefs.table;
    lastRef = newPendingRefs.nextEntry;
    assert(ref < lastRef);
    HPROF_SET_GC_SCAN_STATE(HPROF_ROOT_FINALIZING, 0);
    while (ref < lastRef) {
        markObjectNonNull(*ref, markContext);
        ref++;
    }
    HPROF_CLEAR_GC_SCAN_STATE();

    /* Set markAllReferents so that we don't collect referents whose
     * only references are in final-reachable objects.
     * TODO: eventually provide normal reference behavior by properly
     *       marking these references.
     */
    gDvm.gcHeap->markAllReferents = true;
    processMarkStack(markContext);
    gDvm.gcHeap->markAllReferents = false;

    dvmSignalHeapWorker(false);
}

void dvmHeapFinishMarkStep()
{
    GcMarkContext *markContext;

    markContext = &gDvm.gcHeap->markContext;

    /* The sweep step freed every object that appeared in the
     * HeapSource bitmaps that didn't appear in the mark bitmaps.
     * The new state of the HeapSource is exactly the final
     * mark bitmaps, so swap them in.
     *
     * The old bitmaps will be swapped into the context so that
     * we can clean them up.
     */
    dvmHeapSourceReplaceObjectBitmaps(markContext->bitmaps,
            markContext->numBitmaps);

    /* Clean up the old HeapSource bitmaps and anything else associated
     * with the marking process.
     */
    dvmHeapBitmapDeleteList(markContext->bitmaps, markContext->numBitmaps);
    destroyMarkStack(&markContext->stack);

    memset(markContext, 0, sizeof(*markContext));
}

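/* Optional hprof support: dump the objects that appear in the object
 * bitmaps but not in the mark bitmaps, i.e. the objects that are about
 * to be swept.
 */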
#if WITH_HPROF && WITH_HPROF_UNREACHABLE
static bool
hprofUnreachableBitmapCallback(size_t numPtrs, void **ptrs,
        const void *finger, void *arg)
{
    hprof_context_t *hctx = (hprof_context_t *)arg;
    size_t i;

    for (i = 0; i < numPtrs; i++) {
        Object *obj;

        /* The pointers we're getting back are DvmHeapChunks, not
         * Objects.
         */
        obj = (Object *)chunk2ptr(*ptrs++);

        hprofMarkRootObject(hctx, obj, 0);
        hprofDumpHeapObject(hctx, obj);
    }

    return true;
}

static void
hprofDumpUnmarkedObjects(const HeapBitmap markBitmaps[],
        const HeapBitmap objectBitmaps[], size_t numBitmaps)
{
    hprof_context_t *hctx = gDvm.gcHeap->hprofContext;
    if (hctx == NULL) {
        return;
    }

    LOGI("hprof: dumping unreachable objects\n");

    HPROF_SET_GC_SCAN_STATE(HPROF_UNREACHABLE, 0);

    dvmHeapBitmapXorWalkLists(markBitmaps, objectBitmaps, numBitmaps,
            hprofUnreachableBitmapCallback, hctx);

    HPROF_CLEAR_GC_SCAN_STATE();
}
#endif

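/* Callback for the mark/object bitmap XOR walk during sweeping.  Every
 * pointer handed in refers to an unmarked (dead) chunk; free class
 * internals for dead class objects, then free the whole batch.
 */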
static bool
sweepBitmapCallback(size_t numPtrs, void **ptrs, const void *finger, void *arg)
{
    const ClassObject *const classJavaLangClass = gDvm.classJavaLangClass;
    size_t i;
    void **origPtrs = ptrs;

    for (i = 0; i < numPtrs; i++) {
        DvmHeapChunk *hc;
        Object *obj;

        /* The pointers we're getting back are DvmHeapChunks, not
         * Objects.
         */
        hc = (DvmHeapChunk *)*ptrs++;
        obj = (Object *)chunk2ptr(hc);

        /* NOTE: Dereferencing clazz is dangerous.  If obj was the last
         * one to reference its class object, the class object could be
         * on the sweep list, and could already have been swept, leaving
         * us with a stale pointer.
         */
        LOGV_SWEEP("FREE: 0x%08x %s\n", (uint)obj, obj->clazz->name);

        /* This assumes that java.lang.Class will never go away.
         * If it can, and we were the last reference to it, it
         * could have already been swept.  However, even in that case,
         * gDvm.classJavaLangClass should still have a useful
         * value.
         */
        if (obj->clazz == classJavaLangClass) {
            LOGV_SWEEP("---------------> %s\n", ((ClassObject *)obj)->name);
            /* dvmFreeClassInnards() may have already been called,
             * but it's safe to call on the same ClassObject twice.
             */
            dvmFreeClassInnards((ClassObject *)obj);
        }

#if 0
        /* Overwrite the to-be-freed object to make stale references
         * more obvious.
         */
        {
            int chunklen;
            ClassObject *clazz = obj->clazz;
            chunklen = dvmHeapSourceChunkSize(hc);
            memset(hc, 0xa5, chunklen);
            obj->clazz = (ClassObject *)((uintptr_t)clazz ^ 0xffffffff);
        }
#endif
    }
    // TODO: dvmHeapSourceFreeList has a loop, just like the above
    // does. Consider collapsing the two loops to save overhead.
    dvmHeapSourceFreeList(numPtrs, origPtrs);

    return true;
}

/* Returns true if the given object is unmarked.  Ignores the low bits
 * of the pointer because the intern table may set them.
 */
static int isUnmarkedObject(void *object)
{
    return !isMarked(ptr2chunk((uintptr_t)object & ~(HB_OBJECT_ALIGNMENT-1)),
            &gDvm.gcHeap->markContext);
}

/* Walk through the list of objects that haven't been
 * marked and free them.
 */
void
dvmHeapSweepUnmarkedObjects(int *numFreed, size_t *sizeFreed)
{
    const HeapBitmap *markBitmaps;
    const GcMarkContext *markContext;
    HeapBitmap objectBitmaps[HEAP_SOURCE_MAX_HEAP_COUNT];
    size_t origObjectsAllocated;
    size_t origBytesAllocated;
    size_t numBitmaps;

    /* All reachable objects have been marked.
     * Detach any unreachable interned strings before
     * we sweep.
     */
    dvmGcDetachDeadInternedStrings(isUnmarkedObject);

    /* Free any known objects that are not marked.
     */
    origObjectsAllocated = dvmHeapSourceGetValue(HS_OBJECTS_ALLOCATED, NULL, 0);
    origBytesAllocated = dvmHeapSourceGetValue(HS_BYTES_ALLOCATED, NULL, 0);

    markContext = &gDvm.gcHeap->markContext;
    markBitmaps = markContext->bitmaps;
    numBitmaps = dvmHeapSourceGetObjectBitmaps(objectBitmaps,
            HEAP_SOURCE_MAX_HEAP_COUNT);
#ifndef NDEBUG
    if (numBitmaps != markContext->numBitmaps) {
        LOGE("heap bitmap count mismatch: %zd != %zd\n",
                numBitmaps, markContext->numBitmaps);
        dvmAbort();
    }
#endif

#if WITH_HPROF && WITH_HPROF_UNREACHABLE
    hprofDumpUnmarkedObjects(markBitmaps, objectBitmaps, numBitmaps);
#endif

    dvmSweepMonitorList(&gDvm.monitorList, isUnmarkedObject);

    dvmHeapBitmapXorWalkLists(markBitmaps, objectBitmaps, numBitmaps,
            sweepBitmapCallback, NULL);

    *numFreed = origObjectsAllocated -
            dvmHeapSourceGetValue(HS_OBJECTS_ALLOCATED, NULL, 0);
    *sizeFreed = origBytesAllocated -
            dvmHeapSourceGetValue(HS_BYTES_ALLOCATED, NULL, 0);

#ifdef WITH_PROFILER
    if (gDvm.allocProf.enabled) {
        gDvm.allocProf.freeCount += *numFreed;
        gDvm.allocProf.freeSize += *sizeFreed;
    }
#endif
}