path: root/js/src/gc/RootMarking.cpp
/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
 * This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */

#include "mozilla/ArrayUtils.h"

#ifdef MOZ_VALGRIND
# include <valgrind/memcheck.h>
#endif

#include "jscntxt.h"
#include "jsgc.h"
#include "jsprf.h"
#include "jstypes.h"

#include "builtin/MapObject.h"
#include "frontend/BytecodeCompiler.h"
#include "gc/GCInternals.h"
#include "gc/Marking.h"
#include "jit/MacroAssembler.h"
#include "js/HashTable.h"
#include "vm/Debugger.h"
#include "vm/JSONParser.h"

#include "jsgcinlines.h"
#include "jsobjinlines.h"

using namespace js;
using namespace js::gc;

using mozilla::ArrayEnd;

using JS::AutoGCRooter;

typedef RootedValueMap::Range RootRange;
typedef RootedValueMap::Entry RootEntry;
typedef RootedValueMap::Enum RootEnum;

template <typename T>
using TraceFunction = void (*)(JSTracer* trc, T* ref, const char* name);

// For more detail see JS::Rooted::ptr and js::DispatchWrapper.
//
// The JS::RootKind::Traceable list contains a bunch of totally disparate
// types, but the instantiations of DispatchWrapper below need /something/ in
// the type field. We use the following type as a compatible stand-in. None of
// ConcreteTraceable's methods are used at runtime -- the real trace function
// is stored inline in the DispatchWrapper.
struct ConcreteTraceable {
    ConcreteTraceable() { MOZ_CRASH("instantiation of ConcreteTraceable"); }
    void trace(JSTracer*) {}
};
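
// A minimal usage sketch (hypothetical type, not part of this file): any
// stack class with a trace(JSTracer*) method can be rooted on the Traceable
// list; the wrapper recorded by js::DispatchWrapper forwards to that method
// when TraceStackRoots() below reaches the list:
//
//   struct MyRootable {
//       JSObject* obj = nullptr;
//       void trace(JSTracer* trc) {
//           if (obj)
//               TraceRoot(trc, &obj, "MyRootable::obj");
//       }
//   };
//
//   // In some function with a live JSContext* cx:
//   JS::Rooted<MyRootable> r(cx);   // now reachable from the Traceable list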

template <typename T, TraceFunction<T> TraceFn = TraceNullableRoot>
static inline void
MarkExactStackRootList(JSTracer* trc, JS::Rooted<void*>* rooter, const char* name)
{
    while (rooter) {
        T* addr = reinterpret_cast<JS::Rooted<T>*>(rooter)->address();
        TraceFn(trc, addr, name);
        rooter = rooter->previous();
    }
}

static inline void
TraceStackRoots(JSTracer* trc, RootedListHeads& stackRoots)
{
#define MARK_ROOTS(name, type, _) \
    MarkExactStackRootList<type*>(trc, stackRoots[JS::RootKind::name], "exact-" #name);
JS_FOR_EACH_TRACEKIND(MARK_ROOTS)
#undef MARK_ROOTS
    MarkExactStackRootList<jsid>(trc, stackRoots[JS::RootKind::Id], "exact-id");
    MarkExactStackRootList<Value>(trc, stackRoots[JS::RootKind::Value], "exact-value");
    MarkExactStackRootList<ConcreteTraceable,
                           js::DispatchWrapper<ConcreteTraceable>::TraceWrapped>(
        trc, stackRoots[JS::RootKind::Traceable], "Traceable");
}

void
js::RootLists::traceStackRoots(JSTracer* trc)
{
    TraceStackRoots(trc, stackRoots_);
}

static void
MarkExactStackRoots(JSRuntime* rt, JSTracer* trc)
{
    for (ZonesIter zone(rt, SkipAtoms); !zone.done(); zone.next())
        TraceStackRoots(trc, zone->stackRoots_);
    rt->contextFromMainThread()->roots.traceStackRoots(trc);
}

template <typename T, TraceFunction<T> TraceFn = TraceNullableRoot>
static inline void
MarkPersistentRootedList(JSTracer* trc, mozilla::LinkedList<PersistentRooted<void*>>& list,
                         const char* name)
{
    for (PersistentRooted<void*>* r : list)
        TraceFn(trc, reinterpret_cast<PersistentRooted<T>*>(r)->address(), name);
}

void
js::RootLists::tracePersistentRoots(JSTracer* trc)
{
#define MARK_ROOTS(name, type, _) \
    MarkPersistentRootedList<type*>(trc, heapRoots_[JS::RootKind::name], "persistent-" #name);
JS_FOR_EACH_TRACEKIND(MARK_ROOTS)
#undef MARK_ROOTS
    MarkPersistentRootedList<jsid>(trc, heapRoots_[JS::RootKind::Id], "persistent-id");
    MarkPersistentRootedList<Value>(trc, heapRoots_[JS::RootKind::Value], "persistent-value");
    MarkPersistentRootedList<ConcreteTraceable,
                             js::DispatchWrapper<ConcreteTraceable>::TraceWrapped>(
        trc, heapRoots_[JS::RootKind::Traceable], "persistent-traceable");
}
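
// A minimal usage sketch: a PersistentRooted lands on the per-kind LinkedList
// walked above and stays rooted until it is reset or destroyed, independent
// of C++ scope. For example (assuming a live JSContext* cx):
//
//   JS::PersistentRootedObject obj(cx, JS_NewPlainObject(cx));
//   // ... traced by tracePersistentRoots() on every GC ...
//   obj.reset();   // drops the root; see finishPersistentRoots() below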

static void
MarkPersistentRooted(JSRuntime* rt, JSTracer* trc)
{
    rt->contextFromMainThread()->roots.tracePersistentRoots(trc);
}

template <typename T>
static void
FinishPersistentRootedChain(mozilla::LinkedList<PersistentRooted<void*>>& listArg)
{
    auto& list = reinterpret_cast<mozilla::LinkedList<PersistentRooted<T>>&>(listArg);
    while (!list.isEmpty())
        list.getFirst()->reset();
}

void
js::RootLists::finishPersistentRoots()
{
#define FINISH_ROOT_LIST(name, type, _) \
    FinishPersistentRootedChain<type*>(heapRoots_[JS::RootKind::name]);
JS_FOR_EACH_TRACEKIND(FINISH_ROOT_LIST)
#undef FINISH_ROOT_LIST
    FinishPersistentRootedChain<jsid>(heapRoots_[JS::RootKind::Id]);
    FinishPersistentRootedChain<Value>(heapRoots_[JS::RootKind::Value]);

    // Note that we do not finalize the Traceable list as we do not know how to
    // safely clear members. We instead assert that none escape the RootLists.
    // See the comment on RootLists::~RootLists for details.
}

inline void
AutoGCRooter::trace(JSTracer* trc)
{
    switch (tag_) {
      case PARSER:
        frontend::MarkParser(trc, this);
        return;

      case VALARRAY: {
        /*
         * We don't know the template size parameter, but we can safely treat it
         * as an AutoValueArray<1> because the length is stored separately.
         */
        AutoValueArray<1>* array = static_cast<AutoValueArray<1>*>(this);
        TraceRootRange(trc, array->length(), array->begin(), "js::AutoValueArray");
        return;
      }

      case IONMASM: {
        static_cast<js::jit::MacroAssembler::AutoRooter*>(this)->masm()->trace(trc);
        return;
      }

      case WRAPPER: {
        /*
         * We need to use TraceManuallyBarrieredEdge here because we mark
         * wrapper roots in every slice. This is because of some rule-breaking
         * in RemapAllWrappersForObject; see comment there.
         */
        TraceManuallyBarrieredEdge(trc, &static_cast<AutoWrapperRooter*>(this)->value.get(),
                                   "JS::AutoWrapperRooter.value");
        return;
      }

      case WRAPVECTOR: {
        AutoWrapperVector::VectorImpl& vector = static_cast<AutoWrapperVector*>(this)->vector;
        /*
         * We need to use TraceManuallyBarrieredEdge here because we mark
         * wrapper roots in every slice. This is because of some rule-breaking
         * in RemapAllWrappersForObject; see comment there.
         */
        for (WrapperValue* p = vector.begin(); p < vector.end(); p++)
            TraceManuallyBarrieredEdge(trc, &p->get(), "js::AutoWrapperVector.vector");
        return;
      }

      case CUSTOM:
        static_cast<JS::CustomAutoRooter*>(this)->trace(trc);
        return;
    }

    MOZ_ASSERT(tag_ >= 0);
    if (Value* vp = static_cast<AutoArrayRooter*>(this)->array)
        TraceRootRange(trc, tag_, vp, "JS::AutoArrayRooter.array");
}
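
// A minimal sketch (hypothetical subclass, not part of this file): embedders
// can hook into this rooter chain through JS::CustomAutoRooter; its virtual
// trace() is what the CUSTOM case above ends up calling:
//
//   class MyRooter : public JS::CustomAutoRooter {
//       JSObject* obj;
//       void trace(JSTracer* trc) override {
//           if (obj)
//               TraceRoot(trc, &obj, "MyRooter::obj");
//       }
//     public:
//       MyRooter(JSContext* cx, JSObject* o)
//         : JS::CustomAutoRooter(cx), obj(o) {}
//   };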

/* static */ void
AutoGCRooter::traceAll(JSTracer* trc)
{
    JSContext* cx = trc->runtime()->contextFromMainThread();
    for (AutoGCRooter* gcr = cx->roots.autoGCRooters_; gcr; gcr = gcr->down)
        gcr->trace(trc);
}

/* static */ void
AutoGCRooter::traceAllWrappers(JSTracer* trc)
{
    JSContext* cx = trc->runtime()->contextFromMainThread();

    for (AutoGCRooter* gcr = cx->roots.autoGCRooters_; gcr; gcr = gcr->down) {
        if (gcr->tag_ == WRAPVECTOR || gcr->tag_ == WRAPPER)
            gcr->trace(trc);
    }
}

void
StackShape::trace(JSTracer* trc)
{
    if (base)
        TraceRoot(trc, &base, "StackShape base");

    TraceRoot(trc, (jsid*) &propid, "StackShape id");

    if ((attrs & JSPROP_GETTER) && rawGetter)
        TraceRoot(trc, (JSObject**)&rawGetter, "StackShape getter");

    if ((attrs & JSPROP_SETTER) && rawSetter)
        TraceRoot(trc, (JSObject**)&rawSetter, "StackShape setter");
}

void
PropertyDescriptor::trace(JSTracer* trc)
{
    if (obj)
        TraceRoot(trc, &obj, "Descriptor::obj");
    TraceRoot(trc, &value, "Descriptor::value");
    if ((attrs & JSPROP_GETTER) && getter) {
        JSObject* tmp = JS_FUNC_TO_DATA_PTR(JSObject*, getter);
        TraceRoot(trc, &tmp, "Descriptor::get");
        getter = JS_DATA_TO_FUNC_PTR(JSGetterOp, tmp);
    }
    if ((attrs & JSPROP_SETTER) && setter) {
        JSObject* tmp = JS_FUNC_TO_DATA_PTR(JSObject*, setter);
        TraceRoot(trc, &tmp, "Descriptor::set");
        setter = JS_DATA_TO_FUNC_PTR(JSSetterOp, tmp);
    }
}

void
js::gc::GCRuntime::traceRuntimeForMajorGC(JSTracer* trc, AutoLockForExclusiveAccess& lock)
{
    // finishRoots will have asserted that every root we do not expect to
    // survive shutdown is gone, so we can simply skip traceRuntime here.
    if (rt->isBeingDestroyed())
        return;

    gcstats::AutoPhase ap(stats, gcstats::PHASE_MARK_ROOTS);
    if (rt->atomsCompartment(lock)->zone()->isCollecting())
        traceRuntimeAtoms(trc, lock);
    JSCompartment::traceIncomingCrossCompartmentEdgesForZoneGC(trc);
    traceRuntimeCommon(trc, MarkRuntime, lock);
}

void
js::gc::GCRuntime::traceRuntimeForMinorGC(JSTracer* trc, AutoLockForExclusiveAccess& lock)
{
    // Note that we *must* trace the runtime during the SHUTDOWN_GC's minor GC
    // despite having called FinishRoots already. This is because FinishRoots
    // does not clear the crossCompartmentWrapper map. It cannot do this
    // because Proxy's trace for CrossCompartmentWrappers asserts presence in
    // the map. And we can reach its trace function despite having finished the
    // roots via the edges stored by the pre-barrier verifier when we finish
    // the verifier for the last time.
    gcstats::AutoPhase ap(stats, gcstats::PHASE_MARK_ROOTS);

    // FIXME: As per bug 1298816 comment 12, we should be able to remove this.
    jit::JitRuntime::MarkJitcodeGlobalTableUnconditionally(trc);

    traceRuntimeCommon(trc, TraceRuntime, lock);
}

void
js::TraceRuntime(JSTracer* trc)
{
    MOZ_ASSERT(!trc->isMarkingTracer());

    JSRuntime* rt = trc->runtime();
    rt->gc.evictNursery();
    AutoPrepareForTracing prep(rt->contextFromMainThread(), WithAtoms);
    gcstats::AutoPhase ap(rt->gc.stats, gcstats::PHASE_TRACE_HEAP);
    rt->gc.traceRuntime(trc, prep.session().lock);
}

void
js::gc::GCRuntime::traceRuntime(JSTracer* trc, AutoLockForExclusiveAccess& lock)
{
    MOZ_ASSERT(!rt->isBeingDestroyed());

    gcstats::AutoPhase ap(stats, gcstats::PHASE_MARK_ROOTS);
    traceRuntimeAtoms(trc, lock);
    traceRuntimeCommon(trc, TraceRuntime, lock);
}

void
js::gc::GCRuntime::traceRuntimeAtoms(JSTracer* trc, AutoLockForExclusiveAccess& lock)
{
    gcstats::AutoPhase ap(stats, gcstats::PHASE_MARK_RUNTIME_DATA);
    MarkPermanentAtoms(trc);
    MarkAtoms(trc, lock);
    MarkWellKnownSymbols(trc);
    jit::JitRuntime::Mark(trc, lock);
}

void
js::gc::GCRuntime::traceRuntimeCommon(JSTracer* trc, TraceOrMarkRuntime traceOrMark,
                                      AutoLockForExclusiveAccess& lock)
{
    MOZ_ASSERT(!rt->mainThread.suppressGC);

    {
        gcstats::AutoPhase ap(stats, gcstats::PHASE_MARK_STACK);

        // Trace active interpreter and JIT stack roots.
        MarkInterpreterActivations(rt, trc);
        jit::MarkJitActivations(rt, trc);

        // Trace legacy C stack roots.
        AutoGCRooter::traceAll(trc);

        for (RootRange r = rootsHash.all(); !r.empty(); r.popFront()) {
            const RootEntry& entry = r.front();
            TraceRoot(trc, entry.key(), entry.value());
        }

        // Trace exactly-rooted C stack roots (JS::Rooted and friends).
        MarkExactStackRoots(rt, trc);
    }

    // Trace runtime global roots.
    MarkPersistentRooted(rt, trc);

    // Trace the self-hosting global compartment.
    rt->markSelfHostingGlobal(trc);

    // Trace the shared Intl data.
    rt->traceSharedIntlData(trc);

    // Trace anything in the single context. Note that this is actually the
    // same struct as the JSRuntime, but is still split for historical reasons.
    rt->contextFromMainThread()->mark(trc);

    // Trace all compartment roots, but not the compartment itself; it is
    // marked via the parent pointer if traceRoots actually traces anything.
    for (CompartmentsIter c(rt, SkipAtoms); !c.done(); c.next())
        c->traceRoots(trc, traceOrMark);

    // Trace SPS.
    rt->spsProfiler.trace(trc);

    // Trace helper thread roots.
    HelperThreadState().trace(trc);

    // Trace the embedding's black and gray roots.
    if (!rt->isHeapMinorCollecting()) {
        gcstats::AutoPhase ap(stats, gcstats::PHASE_MARK_EMBEDDING);

        /*
         * The embedding can register additional roots here.
         *
         * We don't need to trace these in a minor GC because all pointers into
         * the nursery should be in the store buffer, and we want to avoid the
         * time taken to trace all these roots.
         */
        for (size_t i = 0; i < blackRootTracers.length(); i++) {
            const Callback<JSTraceDataOp>& e = blackRootTracers[i];
            (*e.op)(trc, e.data);
        }

        /* During GC, we don't mark gray roots at this stage. */
        if (JSTraceDataOp op = grayRootTracer.op) {
            if (traceOrMark == TraceRuntime)
                (*op)(trc, grayRootTracer.data);
        }
    }
}
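
// A minimal sketch of how the black-root callbacks iterated above get
// registered (hedged: the exact registration signature depends on the API
// era, but the JSTraceDataOp callback shape is as shown):
//
//   static JS::Heap<JSObject*> myRoot;
//
//   static void TraceMyRoots(JSTracer* trc, void* data) {
//       if (myRoot)
//           JS::TraceEdge(trc, &myRoot, "embedding black root");
//   }
//
//   // Registered via JS_AddExtraGCRootsTracer(cx, TraceMyRoots, nullptr);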

#ifdef DEBUG
class AssertNoRootsTracer : public JS::CallbackTracer
{
    void onChild(const JS::GCCellPtr& thing) override {
        MOZ_CRASH("There should not be any roots after finishRoots");
    }

  public:
    AssertNoRootsTracer(JSRuntime* rt, WeakMapTraceKind weakTraceKind)
      : JS::CallbackTracer(rt, weakTraceKind)
    {}
};
#endif // DEBUG

void
js::gc::GCRuntime::finishRoots()
{
    rt->finishAtoms();

    if (rootsHash.initialized())
        rootsHash.clear();

    rt->contextFromMainThread()->roots.finishPersistentRoots();

    rt->finishSelfHosting();

    for (CompartmentsIter c(rt, SkipAtoms); !c.done(); c.next())
        c->finishRoots();

#ifdef DEBUG
    // The nsWrapperCache may not be empty before our shutdown GC, so we have
    // to skip that table when verifying that we are fully unrooted.
    auto prior = grayRootTracer;
    grayRootTracer = Callback<JSTraceDataOp>(nullptr, nullptr);

    AssertNoRootsTracer trc(rt, TraceWeakMapKeysValues);
    AutoPrepareForTracing prep(rt->contextFromMainThread(), WithAtoms);
    gcstats::AutoPhase ap(rt->gc.stats, gcstats::PHASE_TRACE_HEAP);
    traceRuntime(&trc, prep.session().lock);

    // Restore the wrapper tracing so that we leak instead of leaving dangling
    // pointers.
    grayRootTracer = prior;
#endif // DEBUG
}

// Append traced things to a buffer on the zone for use later in the GC.
// See the comment in GCRuntime.h above grayBufferState for details.
class BufferGrayRootsTracer : public JS::CallbackTracer
{
    // Set to false if we OOM while buffering gray roots.
    bool bufferingGrayRootsFailed;

    void onChild(const JS::GCCellPtr& thing) override;

  public:
    explicit BufferGrayRootsTracer(JSRuntime* rt)
      : JS::CallbackTracer(rt), bufferingGrayRootsFailed(false)
    {}

    bool failed() const { return bufferingGrayRootsFailed; }

#ifdef DEBUG
    TracerKind getTracerKind() const override { return TracerKind::GrayBuffering; }
#endif
};

#ifdef DEBUG
// Return true if this trace is happening on behalf of gray buffering during
// the marking phase of incremental GC.
bool
js::IsBufferGrayRootsTracer(JSTracer* trc)
{
    return trc->isCallbackTracer() &&
           trc->asCallbackTracer()->getTracerKind() == JS::CallbackTracer::TracerKind::GrayBuffering;
}
#endif

void
js::gc::GCRuntime::bufferGrayRoots()
{
    // Precondition: the state has been reset to "unused" after the last GC
    //               and the zone's buffers have been cleared.
    MOZ_ASSERT(grayBufferState == GrayBufferState::Unused);
    for (GCZonesIter zone(rt); !zone.done(); zone.next())
        MOZ_ASSERT(zone->gcGrayRoots.empty());

    gcstats::AutoPhase ap(stats, gcstats::PHASE_BUFFER_GRAY_ROOTS);

    BufferGrayRootsTracer grayBufferer(rt);
    if (JSTraceDataOp op = grayRootTracer.op)
        (*op)(&grayBufferer, grayRootTracer.data);

    // Propagate the failure flag from the marker to the runtime.
    if (grayBufferer.failed()) {
        grayBufferState = GrayBufferState::Failed;
        resetBufferedGrayRoots();
    } else {
        grayBufferState = GrayBufferState::Okay;
    }
}
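
// A minimal sketch of where grayRootTracer comes from (hedged: the exact
// registration signature depends on the API era): the embedding installs a
// single gray-root callback, and that callback is what bufferGrayRoots()
// above invokes with the BufferGrayRootsTracer:
//
//   static void TraceGrayRoots(JSTracer* trc, void* data) {
//       // Trace roots that may only be reachable from cycle-collected
//       // (gray) objects, using the same JS::TraceEdge-style calls as for
//       // black roots.
//   }
//
//   // Registered via JS_SetGrayGCRootsTracer(cx, TraceGrayRoots, nullptr);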

struct SetMaybeAliveFunctor {
    template <typename T> void operator()(T* t) { SetMaybeAliveFlag(t); }
};

void
BufferGrayRootsTracer::onChild(const JS::GCCellPtr& thing)
{
    MOZ_ASSERT(runtime()->isHeapBusy());
    MOZ_RELEASE_ASSERT(thing);
    // Check if |thing| is corrupt by calling a method that touches the heap.
    MOZ_RELEASE_ASSERT(thing.asCell()->getTraceKind() <= JS::TraceKind::Null);

    if (bufferingGrayRootsFailed)
        return;

    gc::TenuredCell* tenured = gc::TenuredCell::fromPointer(thing.asCell());

    Zone* zone = tenured->zone();
    if (zone->isCollecting()) {
        // See the comment on SetMaybeAliveFlag to see why we only do this for
        // objects and scripts. We rely on gray root buffering for this to work,
        // but we only need to worry about uncollected dead compartments during
        // incremental GCs (when we do gray root buffering).
        DispatchTyped(SetMaybeAliveFunctor(), thing);

        if (!zone->gcGrayRoots.append(tenured))
            bufferingGrayRootsFailed = true;
    }
}

void
GCRuntime::markBufferedGrayRoots(JS::Zone* zone)
{
    MOZ_ASSERT(grayBufferState == GrayBufferState::Okay);
    MOZ_ASSERT(zone->isGCMarkingGray() || zone->isGCCompacting());

    for (auto cell : zone->gcGrayRoots)
        TraceManuallyBarrieredGenericPointerEdge(&marker, &cell, "buffered gray root");
}

void
GCRuntime::resetBufferedGrayRoots() const
{
    MOZ_ASSERT(grayBufferState != GrayBufferState::Okay,
               "Do not clear the gray buffers unless we are Failed or becoming Unused");
    for (GCZonesIter zone(rt); !zone.done(); zone.next())
        zone->gcGrayRoots.clearAndFree();
}