Diffstat (limited to 'js')
-rw-r--r--   js/ipc/JavaScriptShared.cpp    |   9
-rw-r--r--   js/ipc/JavaScriptShared.h      |   1
-rw-r--r--   js/ipc/WrapperAnswer.cpp       |   2
-rw-r--r--   js/src/builtin/TypedObject.cpp |  10
-rw-r--r--   js/src/gc/GCRuntime.h          |   4
-rw-r--r--   js/src/gc/RootMarking.cpp      |   2
-rw-r--r--   js/src/gc/Zone.cpp             |  15
-rw-r--r--   js/src/gc/Zone.h               |  11
-rw-r--r--   js/src/jsgc.cpp                | 172
-rw-r--r--   js/src/jsgc.h                  |  11
-rw-r--r--   js/src/jswatchpoint.cpp        |  19
11 files changed, 166 insertions, 90 deletions
diff --git a/js/ipc/JavaScriptShared.cpp b/js/ipc/JavaScriptShared.cpp
index 9786243f2..961a3f910 100644
--- a/js/ipc/JavaScriptShared.cpp
+++ b/js/ipc/JavaScriptShared.cpp
@@ -61,6 +61,15 @@ IdToObjectMap::find(ObjectId id)
     return p->value();
 }
 
+JSObject*
+IdToObjectMap::findPreserveColor(ObjectId id)
+{
+    Table::Ptr p = table_.lookup(id);
+    if (!p)
+        return nullptr;
+    return p->value().unbarrieredGet();
+}
+
 bool
 IdToObjectMap::add(ObjectId id, JSObject* obj)
 {
diff --git a/js/ipc/JavaScriptShared.h b/js/ipc/JavaScriptShared.h
index 4de153826..5ecec7429 100644
--- a/js/ipc/JavaScriptShared.h
+++ b/js/ipc/JavaScriptShared.h
@@ -96,6 +96,7 @@ class IdToObjectMap
 
     bool add(ObjectId id, JSObject* obj);
     JSObject* find(ObjectId id);
+    JSObject* findPreserveColor(ObjectId id);
     void remove(ObjectId id);
     void clear();
 
diff --git a/js/ipc/WrapperAnswer.cpp b/js/ipc/WrapperAnswer.cpp
index fc342bbb6..563f8f90d 100644
--- a/js/ipc/WrapperAnswer.cpp
+++ b/js/ipc/WrapperAnswer.cpp
@@ -789,7 +789,7 @@ WrapperAnswer::RecvDOMInstanceOf(const ObjectId& objId, const int& prototypeID,
 bool
 WrapperAnswer::RecvDropObject(const ObjectId& objId)
 {
-    JSObject* obj = objects_.find(objId);
+    JSObject* obj = objects_.findPreserveColor(objId);
     if (obj) {
         objectIdMap(objId.hasXrayWaiver()).remove(obj);
         objects_.remove(objId);
diff --git a/js/src/builtin/TypedObject.cpp b/js/src/builtin/TypedObject.cpp
index ae74f01bf..0dfc1123a 100644
--- a/js/src/builtin/TypedObject.cpp
+++ b/js/src/builtin/TypedObject.cpp
@@ -652,7 +652,7 @@ ArrayMetaTypeDescr::create(JSContext* cx,
     if (!CreateTraceList(cx, obj))
         return nullptr;
 
-    if (!cx->zone()->typeDescrObjects.put(obj)) {
+    if (!cx->zone()->addTypeDescrObject(cx, obj)) {
         ReportOutOfMemory(cx);
         return nullptr;
     }
@@ -993,8 +993,8 @@ StructMetaTypeDescr::create(JSContext* cx,
     if (!CreateTraceList(cx, descr))
         return nullptr;
 
-    if (!cx->zone()->typeDescrObjects.put(descr) ||
-        !cx->zone()->typeDescrObjects.put(fieldTypeVec))
+    if (!cx->zone()->addTypeDescrObject(cx, descr) ||
+        !cx->zone()->addTypeDescrObject(cx, fieldTypeVec))
     {
         ReportOutOfMemory(cx);
         return nullptr;
@@ -1165,10 +1165,8 @@ DefineSimpleTypeDescr(JSContext* cx,
     if (!CreateTraceList(cx, descr))
         return false;
 
-    if (!cx->zone()->typeDescrObjects.put(descr)) {
-        ReportOutOfMemory(cx);
+    if (!cx->zone()->addTypeDescrObject(cx, descr))
         return false;
-    }
 
     return true;
 }
diff --git a/js/src/gc/GCRuntime.h b/js/src/gc/GCRuntime.h
index 5c2576efd..f102e9ef0 100644
--- a/js/src/gc/GCRuntime.h
+++ b/js/src/gc/GCRuntime.h
@@ -900,7 +900,8 @@ class GCRuntime
 
     void requestMajorGC(JS::gcreason::Reason reason);
     SliceBudget defaultBudget(JS::gcreason::Reason reason, int64_t millis);
-    void budgetIncrementalGC(SliceBudget& budget, AutoLockForExclusiveAccess& lock);
+    void budgetIncrementalGC(JS::gcreason::Reason reason, SliceBudget& budget,
+                             AutoLockForExclusiveAccess& lock);
     void resetIncrementalGC(AbortReason reason, AutoLockForExclusiveAccess& lock);
 
     // Assert if the system state is such that we should never
@@ -915,6 +916,7 @@ class GCRuntime
     void collect(bool nonincrementalByAPI, SliceBudget budget, JS::gcreason::Reason reason) JS_HAZ_GC_CALL;
     MOZ_MUST_USE bool gcCycle(bool nonincrementalByAPI, SliceBudget& budget,
                               JS::gcreason::Reason reason);
+    bool shouldRepeatForDeadZone(JS::gcreason::Reason reason);
     void incrementalCollectSlice(SliceBudget& budget, JS::gcreason::Reason reason,
                                  AutoLockForExclusiveAccess& lock);
 
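Note on the IPC hunks above: IdToObjectMap::findPreserveColor() uses unbarrieredGet() to read the table entry without the read barrier, because RecvDropObject() only needs the pointer's identity to erase bookkeeping and must not mark the object black ("expose" it) during an incremental GC. The sketch below is a self-contained model of that idea; FakeObject, BarrieredPtr and IdMap are stand-ins, not SpiderMonkey's real HeapPtr or IdToObjectMap.

// Simplified model of the "preserve color" lookup pattern -- illustrative only.
#include <cstdint>
#include <iostream>
#include <unordered_map>

struct FakeObject {
    bool exposedToMutator = false;   // stand-in for "marked black by a read barrier"
};

// Stand-in for a barriered heap pointer.
struct BarrieredPtr {
    FakeObject* ptr = nullptr;

    // Normal read: the read barrier conservatively treats the object as live,
    // which during an incremental GC can keep otherwise-dead objects alive.
    FakeObject* get() const {
        if (ptr)
            ptr->exposedToMutator = true;
        return ptr;
    }

    // Unbarriered read: returns the pointer without changing its mark color.
    // Only safe when the caller does not let the object escape, e.g. when it
    // is used solely as a key to remove map entries.
    FakeObject* unbarrieredGet() const { return ptr; }
};

struct IdMap {
    std::unordered_map<uint64_t, BarrieredPtr> table;

    FakeObject* find(uint64_t id) {
        auto p = table.find(id);
        return p == table.end() ? nullptr : p->second.get();
    }

    FakeObject* findPreserveColor(uint64_t id) {
        auto p = table.find(id);
        return p == table.end() ? nullptr : p->second.unbarrieredGet();
    }
};

int main() {
    FakeObject obj;
    IdMap map;
    map.table[1].ptr = &obj;

    // RecvDropObject-style use: look the object up only to erase bookkeeping.
    FakeObject* o = map.findPreserveColor(1);
    if (o)
        map.table.erase(1);
    std::cout << "exposed: " << obj.exposedToMutator << "\n";  // prints 0
}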
diff --git a/js/src/gc/RootMarking.cpp b/js/src/gc/RootMarking.cpp
index 93264084b..f5969bc1f 100644
--- a/js/src/gc/RootMarking.cpp
+++ b/js/src/gc/RootMarking.cpp
@@ -478,6 +478,7 @@ js::gc::GCRuntime::bufferGrayRoots()
     for (GCZonesIter zone(rt); !zone.done(); zone.next())
         MOZ_ASSERT(zone->gcGrayRoots.empty());
 
+    gcstats::AutoPhase ap(stats, gcstats::PHASE_BUFFER_GRAY_ROOTS);
     BufferGrayRootsTracer grayBufferer(rt);
     if (JSTraceDataOp op = grayRootTracer.op)
@@ -540,4 +541,3 @@ GCRuntime::resetBufferedGrayRoots() const
     for (GCZonesIter zone(rt); !zone.done(); zone.next())
         zone->gcGrayRoots.clearAndFree();
 }
-
diff --git a/js/src/gc/Zone.cpp b/js/src/gc/Zone.cpp
index ed099341c..f0cdde012 100644
--- a/js/src/gc/Zone.cpp
+++ b/js/src/gc/Zone.cpp
@@ -370,6 +370,21 @@ Zone::fixupAfterMovingGC()
     fixupInitialShapeTable();
 }
 
+bool
+Zone::addTypeDescrObject(JSContext* cx, HandleObject obj)
+{
+    // Type descriptor objects are always tenured so we don't need post barriers
+    // on the set.
+    MOZ_ASSERT(!IsInsideNursery(obj));
+
+    if (!typeDescrObjects.put(obj)) {
+        ReportOutOfMemory(cx);
+        return false;
+    }
+
+    return true;
+}
+
 ZoneList::ZoneList()
   : head(nullptr), tail(nullptr)
 {}
diff --git a/js/src/gc/Zone.h b/js/src/gc/Zone.h
index 50d06319d..c8520ed55 100644
--- a/js/src/gc/Zone.h
+++ b/js/src/gc/Zone.h
@@ -349,10 +349,17 @@ struct Zone : public JS::shadow::Zone,
 
     // Keep track of all TypeDescr and related objects in this compartment.
     // This is used by the GC to trace them all first when compacting, since the
    // TypedObject trace hook may access these objects.
-    using TypeDescrObjectSet = js::GCHashSet<js::HeapPtr<JSObject*>,
-                                             js::MovableCellHasher<js::HeapPtr<JSObject*>>,
+    //
+    // There are no barriers here - the set contains only tenured objects so no
+    // post-barrier is required, and these are weak references so no pre-barrier
+    // is required.
+    using TypeDescrObjectSet = js::GCHashSet<JSObject*,
+                                             js::MovableCellHasher<JSObject*>,
                                              js::SystemAllocPolicy>;
     JS::WeakCache<TypeDescrObjectSet> typeDescrObjects;
+
+    bool addTypeDescrObject(JSContext* cx, HandleObject obj);
 
     // Malloc counter to measure memory pressure for GC scheduling. It runs from
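Note on the Zone hunks above: addTypeDescrObject() centralizes the set insertion, the OOM reporting that TypedObject.cpp previously repeated at each call site, and the assertion that the object is tenured (which is why the set no longer needs barriered HeapPtr entries). Below is a rough standalone sketch of that shape; FakeContext, FakeObject and FakeZone are stand-ins, not the real JSContext/Zone/GCHashSet machinery.

// Sketch of the add-helper pattern: one place that checks the "tenured only,
// so no post-barrier needed" invariant and reports OOM on failure.
#include <cassert>
#include <iostream>
#include <new>
#include <unordered_set>

struct FakeObject {
    bool tenured = true;   // type descriptors are always tenured in the patch
};

struct FakeContext {
    void reportOutOfMemory() { std::cerr << "out of memory\n"; }
};

struct FakeZone {
    // Weak, unbarriered set of raw pointers; entries are swept elsewhere.
    std::unordered_set<FakeObject*> typeDescrObjects;

    bool addTypeDescrObject(FakeContext* cx, FakeObject* obj) {
        // No post-barrier is needed because the object can never be nursery
        // allocated, mirroring MOZ_ASSERT(!IsInsideNursery(obj)) in the patch.
        assert(obj->tenured);
        try {
            typeDescrObjects.insert(obj);
        } catch (const std::bad_alloc&) {
            cx->reportOutOfMemory();
            return false;
        }
        return true;
    }
};

int main() {
    FakeContext cx;
    FakeZone zone;
    FakeObject descr;
    if (!zone.addTypeDescrObject(&cx, &descr))
        return 1;
    std::cout << "set size: " << zone.typeDescrObjects.size() << "\n";
}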
diff --git a/js/src/jsgc.cpp b/js/src/jsgc.cpp
index 8cee9ec09..194468c5d 100644
--- a/js/src/jsgc.cpp
+++ b/js/src/jsgc.cpp
@@ -1524,19 +1524,11 @@ GCMarker::delayMarkingChildren(const void* thing)
 }
 
 inline void
-ArenaLists::prepareForIncrementalGC(JSRuntime* rt)
+ArenaLists::prepareForIncrementalGC()
 {
+    purge();
     for (auto i : AllAllocKinds()) {
-        FreeSpan* span = freeLists[i];
-        if (span != &placeholder) {
-            if (!span->isEmpty()) {
-                Arena* arena = span->getArena();
-                arena->allocatedDuringIncremental = true;
-                rt->gc.marker.delayMarkingArena(arena);
-            } else {
-                freeLists[i] = &placeholder;
-            }
-        }
+        arenaLists[i].moveCursorToEnd();
     }
 }
 
@@ -2251,7 +2243,7 @@ GCRuntime::updateTypeDescrObjects(MovingTracer* trc, Zone* zone)
 {
     zone->typeDescrObjects.sweep();
     for (auto r = zone->typeDescrObjects.all(); !r.empty(); r.popFront())
-        UpdateCellPointers(trc, r.front().get());
+        UpdateCellPointers(trc, r.front());
 }
 
 void
@@ -3579,6 +3571,23 @@ RelazifyFunctions(Zone* zone, AllocKind kind)
     }
 }
 
+static bool
+ShouldCollectZone(Zone* zone, JS::gcreason::Reason reason)
+{
+    // Normally we collect all scheduled zones.
+    if (reason != JS::gcreason::COMPARTMENT_REVIVED)
+        return zone->isGCScheduled();
+
+    // If we are repeating a GC because we noticed dead compartments haven't
+    // been collected, then only collect zones containing those compartments.
+    for (CompartmentsInZoneIter comp(zone); !comp.done(); comp.next()) {
+        if (comp->scheduledForDestruction)
+            return true;
+    }
+
+    return false;
+}
+
 bool
 GCRuntime::beginMarkPhase(JS::gcreason::Reason reason, AutoLockForExclusiveAccess& lock)
 {
@@ -3602,7 +3611,7 @@ GCRuntime::beginMarkPhase(JS::gcreason::Reason reason, AutoLockForExclusiveAcces
 #endif
 
         /* Set up which zones will be collected. */
-        if (zone->isGCScheduled()) {
+        if (ShouldCollectZone(zone, reason)) {
             if (!zone->isAtomsZone()) {
                 any = true;
                 zone->setGCState(Zone::Mark);
@@ -3621,7 +3630,7 @@ GCRuntime::beginMarkPhase(JS::gcreason::Reason reason, AutoLockForExclusiveAcces
     for (CompartmentsIter c(rt, WithAtoms); !c.done(); c.next()) {
         c->marked = false;
         c->scheduledForDestruction = false;
-        c->maybeAlive = false;
+        c->maybeAlive = c->hasBeenEntered() || !c->zone()->isGCScheduled();
         if (shouldPreserveJITCode(c, currentTime, reason, canAllocateMoreCode))
             c->zone()->setPreservingCode(true);
     }
@@ -3641,6 +3650,7 @@ GCRuntime::beginMarkPhase(JS::gcreason::Reason reason, AutoLockForExclusiveAcces
      * on. If the value of keepAtoms() changes between GC slices, then we'll
      * cancel the incremental GC. See IsIncrementalGCSafe.
      */
+
     if (isFull && !rt->keepAtoms()) {
         Zone* atomsZone = rt->atomsCompartment(lock)->zone();
         if (atomsZone->isGCScheduled()) {
@@ -3655,15 +3665,12 @@ GCRuntime::beginMarkPhase(JS::gcreason::Reason reason, AutoLockForExclusiveAcces
         return false;
 
     /*
-     * At the end of each incremental slice, we call prepareForIncrementalGC,
-     * which marks objects in all arenas that we're currently allocating
-     * into. This can cause leaks if unreachable objects are in these
-     * arenas. This purge call ensures that we only mark arenas that have had
-     * allocations after the incremental GC started.
+     * Ensure that after the start of a collection we don't allocate into any
+     * existing arenas, as this can cause unreachable things to be marked.
      */
     if (isIncremental) {
         for (GCZonesIter zone(rt); !zone.done(); zone.next())
-            zone->arenas.purge();
+            zone->arenas.prepareForIncrementalGC();
     }
 
     MemProfiler::MarkTenuredStart(rt);
@@ -3748,12 +3755,10 @@ GCRuntime::beginMarkPhase(JS::gcreason::Reason reason, AutoLockForExclusiveAcces
         gcstats::AutoPhase ap2(stats, gcstats::PHASE_MARK_ROOTS);
 
         if (isIncremental) {
-            gcstats::AutoPhase ap3(stats, gcstats::PHASE_BUFFER_GRAY_ROOTS);
             bufferGrayRoots();
+            markCompartments();
         }
-
-        markCompartments();
-
+
     return true;
 }
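Note on the ArenaLists hunk above: the new prepareForIncrementalGC() purges the free lists and moves each arena list's cursor to the end, so allocations made while an incremental GC is running go into fresh arenas rather than into partially full ones where unreachable objects could otherwise end up marked. The following sketch models only the cursor trick (a pointer to the preceding link field); FakeArena and FakeArenaList are made-up stand-ins, not the real js::gc::ArenaList.

// Minimal model of ArenaList::moveCursorToEnd(): after the call,
// arenaAfterCursor() is null, so allocation won't reuse existing arenas.
#include <cstdio>

struct FakeArena {
    FakeArena* next = nullptr;
    bool hasFreeSpace = true;
};

class FakeArenaList {
    FakeArena* head_ = nullptr;
    FakeArena** cursorp_ = &head_;   // points at the link preceding the cursor arena

  public:
    void insertAtEnd(FakeArena* a) {
        FakeArena** linkp = &head_;
        while (*linkp)
            linkp = &(*linkp)->next;
        *linkp = a;
    }

    bool isCursorAtEnd() const { return !*cursorp_; }

    // Walk the cursor past every existing arena, as in the patch.
    void moveCursorToEnd() {
        while (!isCursorAtEnd())
            cursorp_ = &(*cursorp_)->next;
    }

    FakeArena* arenaAfterCursor() const { return *cursorp_; }
};

int main() {
    FakeArena a, b;
    FakeArenaList list;
    list.insertAtEnd(&a);
    list.insertAtEnd(&b);

    std::printf("before: %p\n", (void*) list.arenaAfterCursor());  // &a
    list.moveCursorToEnd();
    std::printf("after:  %p\n", (void*) list.arenaAfterCursor());  // null
}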
@@ -3766,9 +3771,14 @@ GCRuntime::markCompartments()
      * This code ensures that if a compartment is "dead", then it will be
      * collected in this GC. A compartment is considered dead if its maybeAlive
      * flag is false. The maybeAlive flag is set if:
-     * (1) the compartment has incoming cross-compartment edges, or
-     * (2) an object in the compartment was marked during root marking, either
-     *     as a black root or a gray root.
+     * (1) the compartment has been entered (set in beginMarkPhase() above)
+     * (2) the compartment is not being collected (set in beginMarkPhase()
+     *     above)
+     * (3) an object in the compartment was marked during root marking, either
+     *     as a black root or a gray root (set in RootMarking.cpp), or
+     * (4) the compartment has incoming cross-compartment edges from another
+     *     compartment that has maybeAlive set (set by this method).
+     *
      * If the maybeAlive is false, then we set the scheduledForDestruction flag.
      * At the end of the GC, we look for compartments where
      * scheduledForDestruction is true. These are compartments that were somehow
@@ -3786,26 +3796,37 @@ GCRuntime::markCompartments()
      * allocation and read barriers during JS_TransplantObject and the like.
      */
 
-    /* Set the maybeAlive flag based on cross-compartment edges. */
-    for (CompartmentsIter c(rt, SkipAtoms); !c.done(); c.next()) {
-        for (JSCompartment::WrapperEnum e(c); !e.empty(); e.popFront()) {
+    /* Propagate the maybeAlive flag via cross-compartment edges. */
+
+    Vector<JSCompartment*, 0, js::SystemAllocPolicy> workList;
+
+    for (CompartmentsIter comp(rt, SkipAtoms); !comp.done(); comp.next()) {
+        if (comp->maybeAlive) {
+            if (!workList.append(comp))
+                return;
+        }
+    }
+
+    while (!workList.empty()) {
+        JSCompartment* comp = workList.popCopy();
+        for (JSCompartment::WrapperEnum e(comp); !e.empty(); e.popFront()) {
             if (e.front().key().is<JSString*>())
                 continue;
             JSCompartment* dest = e.front().mutableKey().compartment();
-            if (dest)
+            if (dest && !dest->maybeAlive) {
                 dest->maybeAlive = true;
+                if (!workList.append(dest))
+                    return;
+            }
         }
     }
 
-    /*
-     * For black roots, code in gc/Marking.cpp will already have set maybeAlive
-     * during MarkRuntime.
-     */
-
-    /* Propogate maybeAlive to scheduleForDestruction. */
-    for (GCCompartmentsIter c(rt); !c.done(); c.next()) {
-        if (!c->maybeAlive && !rt->isAtomsCompartment(c))
-            c->scheduledForDestruction = true;
+
+    /* Set scheduleForDestruction based on maybeAlive. */
+
+    for (GCCompartmentsIter comp(rt); !comp.done(); comp.next()) {
+        MOZ_ASSERT(!comp->scheduledForDestruction);
+        if (!comp->maybeAlive && !rt->isAtomsCompartment(comp))
+            comp->scheduledForDestruction = true;
     }
 }
@@ -5306,7 +5327,7 @@ AutoGCSlice::~AutoGCSlice()
     for (ZonesIter zone(runtime, WithAtoms); !zone.done(); zone.next()) {
         if (zone->isGCMarking()) {
             zone->setNeedsIncrementalBarrier(true, Zone::UpdateJit);
-            zone->arenas.prepareForIncrementalGC(runtime);
+            zone->arenas.purge();
         } else {
             zone->setNeedsIncrementalBarrier(false, Zone::UpdateJit);
         }
     }
@@ -5487,7 +5508,7 @@ gc::AbortReason
 gc::IsIncrementalGCUnsafe(JSRuntime* rt)
 {
     MOZ_ASSERT(!rt->mainThread.suppressGC);
-
+
     if (rt->keepAtoms())
         return gc::AbortReason::KeepAtomsSet;
@@ -5498,9 +5519,17 @@ gc::IsIncrementalGCUnsafe(JSRuntime* rt)
 }
 
 void
-GCRuntime::budgetIncrementalGC(SliceBudget& budget, AutoLockForExclusiveAccess& lock)
+GCRuntime::budgetIncrementalGC(JS::gcreason::Reason reason, SliceBudget& budget,
+                               AutoLockForExclusiveAccess& lock)
 {
     AbortReason unsafeReason = IsIncrementalGCUnsafe(rt);
+    if (unsafeReason == AbortReason::None) {
+        if (reason == JS::gcreason::COMPARTMENT_REVIVED)
+            unsafeReason = gc::AbortReason::CompartmentRevived;
+        else if (mode != JSGC_MODE_INCREMENTAL)
+            unsafeReason = gc::AbortReason::ModeChange;
+    }
+
     if (unsafeReason != AbortReason::None) {
         resetIncrementalGC(unsafeReason, lock);
         budget.makeUnlimited();
@@ -5508,12 +5537,7 @@ GCRuntime::budgetIncrementalGC(SliceBudget& budget, AutoLockForExclusiveAccess&
         return;
     }
 
-    if (mode != JSGC_MODE_INCREMENTAL) {
-        resetIncrementalGC(AbortReason::ModeChange, lock);
-        budget.makeUnlimited();
-        stats.nonincremental(AbortReason::ModeChange);
-        return;
-    }
-
+
     if (isTooMuchMalloc()) {
         budget.makeUnlimited();
@@ -5672,7 +5696,7 @@ GCRuntime::gcCycle(bool nonincrementalByAPI, SliceBudget& budget, JS::gcreason::
         stats.nonincremental(gc::AbortReason::NonIncrementalRequested);
         budget.makeUnlimited();
     } else {
-        budgetIncrementalGC(budget, session.lock);
+        budgetIncrementalGC(reason, budget, session.lock);
     }
 
     /* The GC was reset, so we need a do-over. */
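Note on the markCompartments() hunks above: maybeAlive is now seeded in beginMarkPhase() (from hasBeenEntered() or from not being scheduled for collection) and then flood-filled across cross-compartment edges with an explicit worklist, instead of unconditionally marking the target of every edge alive. A standalone model of that propagation, with FakeCompartment as a stand-in for JSCompartment and std::vector in place of js::Vector, looks roughly like this:

// Two phases: propagate maybeAlive through the edge graph, then schedule
// anything still unreached for destruction.
#include <iostream>
#include <vector>

struct FakeCompartment {
    bool maybeAlive = false;
    bool scheduledForDestruction = false;
    std::vector<FakeCompartment*> outgoingEdges;   // cross-compartment wrappers
};

void MarkCompartments(std::vector<FakeCompartment*>& compartments) {
    // Phase 1: flood-fill maybeAlive from the seeded compartments.
    std::vector<FakeCompartment*> workList;
    for (FakeCompartment* c : compartments) {
        if (c->maybeAlive)
            workList.push_back(c);
    }
    while (!workList.empty()) {
        FakeCompartment* c = workList.back();
        workList.pop_back();
        for (FakeCompartment* dest : c->outgoingEdges) {
            if (!dest->maybeAlive) {
                dest->maybeAlive = true;
                workList.push_back(dest);
            }
        }
    }

    // Phase 2: anything still not maybeAlive is scheduled for destruction.
    for (FakeCompartment* c : compartments) {
        if (!c->maybeAlive)
            c->scheduledForDestruction = true;
    }
}

int main() {
    FakeCompartment entered, reachable, dead;
    entered.maybeAlive = true;            // e.g. hasBeenEntered()
    entered.outgoingEdges.push_back(&reachable);

    std::vector<FakeCompartment*> all{&entered, &reachable, &dead};
    MarkCompartments(all);
    std::cout << reachable.scheduledForDestruction << " "   // 0
              << dead.scheduledForDestruction << "\n";      // 1
}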
@@ -5764,6 +5788,22 @@ GCRuntime::checkIfGCAllowedInCurrentState(JS::gcreason::Reason reason)
     return true;
 }
 
+bool
+GCRuntime::shouldRepeatForDeadZone(JS::gcreason::Reason reason)
+{
+    MOZ_ASSERT_IF(reason == JS::gcreason::COMPARTMENT_REVIVED, !isIncremental);
+
+    if (!isIncremental || isIncrementalGCInProgress())
+        return false;
+
+    for (CompartmentsIter c(rt, SkipAtoms); !c.done(); c.next()) {
+        if (c->scheduledForDestruction)
+            return true;
+    }
+
+    return false;
+}
+
 void
 GCRuntime::collect(bool nonincrementalByAPI, SliceBudget budget, JS::gcreason::Reason reason)
 {
@@ -5782,27 +5822,23 @@ GCRuntime::collect(bool nonincrementalByAPI, SliceBudget budget, JS::gcreason::R
     do {
         poked = false;
         bool wasReset = gcCycle(nonincrementalByAPI, budget, reason);
-
-        /* Need to re-schedule all zones for GC. */
-        if (poked && cleanUpEverything)
+
+        bool repeatForDeadZone = false;
+        if (poked && cleanUpEverything) {
+            /* Need to re-schedule all zones for GC. */
             JS::PrepareForFullGC(rt->contextFromMainThread());
 
-        /*
-         * This code makes an extra effort to collect compartments that we
-         * thought were dead at the start of the GC. See the large comment in
-         * beginMarkPhase.
-         */
-        bool repeatForDeadZone = false;
-        if (!nonincrementalByAPI && !isIncrementalGCInProgress()) {
-            for (CompartmentsIter c(rt, SkipAtoms); !c.done(); c.next()) {
-                if (c->scheduledForDestruction) {
-                    nonincrementalByAPI = true;
-                    repeatForDeadZone = true;
-                    reason = JS::gcreason::COMPARTMENT_REVIVED;
-                    c->zone()->scheduleGC();
-                }
-            }
+
+        } else if (shouldRepeatForDeadZone(reason) && !wasReset) {
+            /*
+             * This code makes an extra effort to collect compartments that we
+             * thought were dead at the start of the GC. See the large comment
+             * in beginMarkPhase.
+             */
+            repeatForDeadZone = true;
+            reason = JS::gcreason::COMPARTMENT_REVIVED;
         }
+
         /*
         * If we reset an existing GC, we need to start a new one. Also, we
diff --git a/js/src/jsgc.h b/js/src/jsgc.h
index d3cf31fe7..952fd6bae 100644
--- a/js/src/jsgc.h
+++ b/js/src/jsgc.h
@@ -61,7 +61,8 @@ enum class State {
     D(ModeChange) \
     D(MallocBytesTrigger) \
     D(GCBytesTrigger) \
-    D(ZoneChange)
+    D(ZoneChange) \
+    D(CompartmentRevived)
 
 enum class AbortReason {
 #define MAKE_REASON(name) name,
     GC_ABORT_REASONS(MAKE_REASON)
@@ -453,6 +454,12 @@ class ArenaList {
         check();
         return !*cursorp_;
     }
+
+    void moveCursorToEnd() {
+        while (!isCursorAtEnd()) {
+            cursorp_ = &(*cursorp_)->next;
+        }
+    }
 
     // This can return nullptr.
     Arena* arenaAfterCursor() const {
@@ -739,7 +746,7 @@ class ArenaLists {
             freeLists[i] = &placeholder;
     }
 
-    inline void prepareForIncrementalGC(JSRuntime* rt);
+    inline void prepareForIncrementalGC();
 
     /* Check if this arena is in use. */
     bool arenaIsInUse(Arena* arena, AllocKind kind) const {
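Note on the jsgc.cpp/jsgc.h hunks above: shouldRepeatForDeadZone() and the reworked loop in GCRuntime::collect() retry the collection once, non-incrementally and with reason COMPARTMENT_REVIVED, when compartments scheduled for destruction survive the first cycle (budgetIncrementalGC() then refuses to run that retry incrementally via the new CompartmentRevived abort reason). The following is a rough self-contained model of that control flow; Reason, FakeCompartment and FakeGC are stand-ins, and runCycle() fakes the actual collection.

// Model of the retry loop: run a cycle, and if dead compartments survived,
// repeat exactly once with a COMPARTMENT_REVIVED-style reason.
#include <iostream>
#include <vector>

enum class Reason { API, CompartmentRevived };

struct FakeCompartment {
    bool scheduledForDestruction = false;
};

struct FakeGC {
    std::vector<FakeCompartment>* compartments;

    // Stand-in for gcCycle(): pretend revived compartments survive the first
    // pass and are only cleaned up on the COMPARTMENT_REVIVED retry.
    void runCycle(Reason reason) {
        if (reason == Reason::CompartmentRevived) {
            for (auto& c : *compartments)
                c.scheduledForDestruction = false;   // collected this time
        }
    }

    bool shouldRepeatForDeadZone(Reason reason) {
        if (reason == Reason::CompartmentRevived)
            return false;                            // never retry twice
        for (auto& c : *compartments) {
            if (c.scheduledForDestruction)
                return true;
        }
        return false;
    }

    void collect(Reason reason) {
        bool repeat;
        do {
            runCycle(reason);
            repeat = shouldRepeatForDeadZone(reason);
            if (repeat)
                reason = Reason::CompartmentRevived;
        } while (repeat);
    }
};

int main() {
    std::vector<FakeCompartment> comps(2);
    comps[0].scheduledForDestruction = true;         // "dead" after first pass
    FakeGC gc{&comps};
    gc.collect(Reason::API);
    std::cout << comps[0].scheduledForDestruction << "\n";   // 0
}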
diff --git a/js/src/jswatchpoint.cpp b/js/src/jswatchpoint.cpp
index 3cf43e219..e37323555 100644
--- a/js/src/jswatchpoint.cpp
+++ b/js/src/jswatchpoint.cpp
@@ -185,17 +185,18 @@ WatchpointMap::markAll(JSTracer* trc)
 {
     for (Map::Enum e(map); !e.empty(); e.popFront()) {
         Map::Entry& entry = e.front();
-        WatchKey key = entry.key();
-        WatchKey prior = key;
-        MOZ_ASSERT(JSID_IS_STRING(prior.id) || JSID_IS_INT(prior.id) || JSID_IS_SYMBOL(prior.id));
-
-        TraceEdge(trc, const_cast<PreBarrieredObject*>(&key.object),
-                  "held Watchpoint object");
-        TraceEdge(trc, const_cast<PreBarrieredId*>(&key.id), "WatchKey::id");
+        JSObject* object = entry.key().object;
+        jsid id = entry.key().id;
+        JSObject* priorObject = object;
+        jsid priorId = id;
+        MOZ_ASSERT(JSID_IS_STRING(priorId) || JSID_IS_INT(priorId) || JSID_IS_SYMBOL(priorId));
+
+        TraceManuallyBarrieredEdge(trc, &object, "held Watchpoint object");
+        TraceManuallyBarrieredEdge(trc, &id, "WatchKey::id");
         TraceEdge(trc, &entry.value().closure, "Watchpoint::closure");
 
-        if (prior.object != key.object || prior.id != key.id)
-            e.rekeyFront(key);
+        if (priorObject != object || priorId != id)
+            e.rekeyFront(WatchKey(object, id));
     }
 }
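Note on the jswatchpoint.cpp hunk above: markAll() now traces the map key through plain local copies using TraceManuallyBarrieredEdge() (no barrier wrappers on stack copies) and rekeys the entry only if tracing actually moved the object or id. The sketch below models the same copy-trace-rekey pattern on a std::unordered_map; TraceEdgeMayMove() is a made-up stand-in for a moving trace (the real TraceManuallyBarrieredEdge takes a pointer to the edge and updates it in place), and FakeObject stands in for GC cells.

// Copy the key out of the table, let "tracing" relocate it, and reinsert the
// entry under the new key only when the location changed.
#include <cstdio>
#include <unordered_map>

struct FakeObject { int data; };

// Stand-in for a moving trace: returns the (possibly new) location of obj.
static FakeObject* TraceEdgeMayMove(FakeObject* obj, FakeObject* newLocation) {
    return newLocation ? newLocation : obj;
}

using WatchMap = std::unordered_map<FakeObject*, int>;

static void MarkAll(WatchMap& map, FakeObject* from, FakeObject* to) {
    std::unordered_map<FakeObject*, int> rekeyed;
    for (auto it = map.begin(); it != map.end();) {
        FakeObject* key = it->first;        // copy the key out of the table
        FakeObject* prior = key;
        key = TraceEdgeMayMove(key, key == from ? to : nullptr);
        if (key != prior) {
            // Equivalent of e.rekeyFront(...): reinsert under the new key.
            rekeyed.emplace(key, it->second);
            it = map.erase(it);
        } else {
            ++it;
        }
    }
    map.insert(rekeyed.begin(), rekeyed.end());
}

int main() {
    FakeObject oldLoc{1}, newLoc{1};
    WatchMap map;
    map[&oldLoc] = 42;
    MarkAll(map, &oldLoc, &newLoc);                  // "GC" moves oldLoc to newLoc
    std::printf("%d\n", map.count(&newLoc) ? map[&newLoc] : -1);   // 42
}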