author      Matt A. Tobin <email@mattatobin.com>    2018-02-03 15:50:22 -0500
committer   Matt A. Tobin <email@mattatobin.com>    2018-02-03 15:50:22 -0500
commit      0083d404eff36f873cde465d50cd34b112bd124f (patch)
tree        465b1f4322c3a2439903d9cabb068169bf6b42c9 /js/src/gc
parent      c6856f968b8c85502f14c6c3412b00a05fc0c0de (diff)
parent      10494e1b7d0b3cd945bb76dca10f5637cf786f27 (diff)
Merge branch 'master' into configurebuild-work
Diffstat (limited to 'js/src/gc')
-rw-r--r--   js/src/gc/Allocator.cpp      7
-rw-r--r--   js/src/gc/GCInternals.h     45
-rw-r--r--   js/src/gc/GCRuntime.h      116
-rw-r--r--   js/src/gc/Heap.h            16
-rw-r--r--   js/src/gc/Nursery.cpp       91
-rw-r--r--   js/src/gc/Nursery.h         12
-rw-r--r--   js/src/gc/Verifier.cpp     414
-rw-r--r--   js/src/gc/Zone.h             6
8 files changed, 3 insertions, 704 deletions
diff --git a/js/src/gc/Allocator.cpp b/js/src/gc/Allocator.cpp
index f7dc50d02..3994d5a5b 100644
--- a/js/src/gc/Allocator.cpp
+++ b/js/src/gc/Allocator.cpp
@@ -190,7 +190,7 @@ GCRuntime::checkAllocatorState(JSContext* cx, AllocKind kind)
return false;
}
-#if defined(JS_GC_ZEAL) || defined(DEBUG)
+#if defined(DEBUG)
MOZ_ASSERT_IF(cx->compartment()->isAtomsCompartment(),
kind == AllocKind::ATOM ||
kind == AllocKind::FAT_INLINE_ATOM ||
@@ -223,11 +223,6 @@ GCRuntime::checkAllocatorState(JSContext* cx, AllocKind kind)
bool
GCRuntime::gcIfNeededPerAllocation(JSContext* cx)
{
-#ifdef JS_GC_ZEAL
- if (needZealousGC())
- runDebugGC();
-#endif
-
// Invoking the interrupt callback can fail and we can't usefully
// handle that here. Just check in case we need to collect instead.
if (rt->hasPendingInterrupt())
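
For context, the block removed from gcIfNeededPerAllocation gave every allocation a chance to run a scheduled debug collection before the normal interrupt and threshold checks. A minimal, self-contained sketch of that countdown-and-collect pattern (DebugGCScheduler and its members are illustrative stand-ins, not SpiderMonkey API):

    // Sketch of the per-allocation debug-GC hook the removed JS_GC_ZEAL block
    // provided: a countdown that triggers a collection when it reaches zero.
    #include <cstdio>
    #include <functional>

    struct DebugGCScheduler {
        int nextScheduled = 0;              // 0 means nothing is scheduled
        std::function<void()> collect;      // stand-in for runDebugGC()

        void onAllocation() {
            if (nextScheduled > 0 && --nextScheduled == 0)
                collect();
        }
    };

    int main() {
        DebugGCScheduler sched;
        sched.collect = [] { std::puts("debug GC"); };
        sched.nextScheduled = 3;
        for (int i = 0; i < 5; ++i)
            sched.onAllocation();           // fires once, on the third allocation
    }
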
diff --git a/js/src/gc/GCInternals.h b/js/src/gc/GCInternals.h
index 722539e1c..4919b87a5 100644
--- a/js/src/gc/GCInternals.h
+++ b/js/src/gc/GCInternals.h
@@ -61,51 +61,6 @@ class MOZ_RAII AutoPrepareForTracing
AbortReason
IsIncrementalGCUnsafe(JSRuntime* rt);
-#ifdef JS_GC_ZEAL
-
-class MOZ_RAII AutoStopVerifyingBarriers
-{
- GCRuntime* gc;
- bool restartPreVerifier;
-
- public:
- AutoStopVerifyingBarriers(JSRuntime* rt, bool isShutdown)
- : gc(&rt->gc)
- {
- if (gc->isVerifyPreBarriersEnabled()) {
- gc->endVerifyPreBarriers();
- restartPreVerifier = !isShutdown;
- } else {
- restartPreVerifier = false;
- }
- }
-
- ~AutoStopVerifyingBarriers() {
- // Nasty special case: verification runs a minor GC, which *may* nest
- // inside of an outer minor GC. This is not allowed by the
- // gc::Statistics phase tree. So we pause the "real" GC, if in fact one
- // is in progress.
- gcstats::Phase outer = gc->stats.currentPhase();
- if (outer != gcstats::PHASE_NONE)
- gc->stats.endPhase(outer);
- MOZ_ASSERT((gc->stats.currentPhase() == gcstats::PHASE_NONE) ||
- (gc->stats.currentPhase() == gcstats::PHASE_GC_BEGIN) ||
- (gc->stats.currentPhase() == gcstats::PHASE_GC_END));
-
- if (restartPreVerifier)
- gc->startVerifyPreBarriers();
-
- if (outer != gcstats::PHASE_NONE)
- gc->stats.beginPhase(outer);
- }
-};
-#else
-struct MOZ_RAII AutoStopVerifyingBarriers
-{
- AutoStopVerifyingBarriers(JSRuntime*, bool) {}
-};
-#endif /* JS_GC_ZEAL */
-
#ifdef JSGC_HASH_TABLE_CHECKS
void CheckHashTablesAfterMovingGC(JSRuntime* rt);
void CheckHeapAfterGC(JSRuntime* rt);
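
The class deleted above is a pause/restore RAII guard: its constructor ends any active pre-barrier verification (remembering whether to restart it), and its destructor restarts it, temporarily closing the current statistics phase so the verifier's nested minor GC does not violate the phase tree. A stripped-down sketch of that shape, assuming Verifier and Stats stand in for GCRuntime's verifier state and gcstats bookkeeping:

    // Sketch: suspend a verifier for the lifetime of a scope, restore it on exit,
    // and pause any enclosing stats phase around the restart.
    #include <optional>
    #include <string>
    #include <utility>

    struct Verifier {
        bool running = false;
        void start() { running = true; }
        void end()   { running = false; }
    };

    struct Stats {
        std::optional<std::string> phase;            // currently open phase, if any
        void endPhase() { phase.reset(); }
        void beginPhase(std::string p) { phase = std::move(p); }
    };

    class AutoStopVerifying {
        Verifier& verifier_;
        Stats& stats_;
        bool restart_;
      public:
        AutoStopVerifying(Verifier& v, Stats& s, bool isShutdown)
          : verifier_(v), stats_(s), restart_(false) {
            if (verifier_.running) {
                verifier_.end();
                restart_ = !isShutdown;              // never restart during shutdown
            }
        }
        ~AutoStopVerifying() {
            auto outer = stats_.phase;               // pause the enclosing phase
            if (outer) stats_.endPhase();
            if (restart_) verifier_.start();
            if (outer) stats_.beginPhase(*outer);
        }
    };
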
diff --git a/js/src/gc/GCRuntime.h b/js/src/gc/GCRuntime.h
index 8c9322849..19737c9ee 100644
--- a/js/src/gc/GCRuntime.h
+++ b/js/src/gc/GCRuntime.h
@@ -600,11 +600,6 @@ class GCRuntime
void finishRoots();
void finish();
- inline bool hasZealMode(ZealMode mode);
- inline void clearZealMode(ZealMode mode);
- inline bool upcomingZealousGC();
- inline bool needZealousGC();
-
MOZ_MUST_USE bool addRoot(Value* vp, const char* name);
void removeRoot(Value* vp);
void setMarkStackLimit(size_t limit, AutoLockGC& lock);
@@ -638,7 +633,6 @@ class GCRuntime
MOZ_RELEASE_ASSERT(triggerGC(JS::gcreason::ALLOC_TRIGGER));
}
- void runDebugGC();
inline void poke();
enum TraceOrMarkRuntime {
@@ -653,19 +647,6 @@ class GCRuntime
void onOutOfMallocMemory();
void onOutOfMallocMemory(const AutoLockGC& lock);
-#ifdef JS_GC_ZEAL
- const void* addressOfZealModeBits() { return &zealModeBits; }
- void getZealBits(uint32_t* zealBits, uint32_t* frequency, uint32_t* nextScheduled);
- void setZeal(uint8_t zeal, uint32_t frequency);
- bool parseAndSetZeal(const char* str);
- void setNextScheduled(uint32_t count);
- void verifyPreBarriers();
- void maybeVerifyPreBarriers(bool always);
- bool selectForMarking(JSObject* object);
- void clearSelectedForMarking();
- void setDeterministic(bool enable);
-#endif
-
size_t maxMallocBytesAllocated() { return maxMallocBytes; }
uint64_t nextCellUniqueId() {
@@ -851,15 +832,6 @@ class GCRuntime
AutoMaybeStartBackgroundAllocation& maybeStartBGAlloc);
void recycleChunk(Chunk* chunk, const AutoLockGC& lock);
-#ifdef JS_GC_ZEAL
- void startVerifyPreBarriers();
- void endVerifyPreBarriers();
- void finishVerifier();
- bool isVerifyPreBarriersEnabled() const { return !!verifyPreData; }
-#else
- bool isVerifyPreBarriersEnabled() const { return false; }
-#endif
-
// Free certain LifoAlloc blocks when it is safe to do so.
void freeUnusedLifoBlocksAfterSweeping(LifoAlloc* lifo);
void freeAllLifoBlocksAfterSweeping(LifoAlloc* lifo);
@@ -948,7 +920,6 @@ class GCRuntime
void incrementalCollectSlice(SliceBudget& budget, JS::gcreason::Reason reason,
AutoLockForExclusiveAccess& lock);
- void pushZealSelectedObjects();
void purgeRuntime(AutoLockForExclusiveAccess& lock);
MOZ_MUST_USE bool beginMarkPhase(JS::gcreason::Reason reason, AutoLockForExclusiveAccess& lock);
bool shouldPreserveJITCode(JSCompartment* comp, int64_t currentTime,
@@ -1004,10 +975,6 @@ class GCRuntime
void releaseRelocatedArenasWithoutUnlocking(Arena* arenaList, const AutoLockGC& lock);
void finishCollection(JS::gcreason::Reason reason);
- void computeNonIncrementalMarkingForValidation(AutoLockForExclusiveAccess& lock);
- void validateIncrementalMarking();
- void finishMarkingValidation();
-
#ifdef DEBUG
void checkForCompartmentMismatches();
#endif
@@ -1219,10 +1186,6 @@ class GCRuntime
ZoneList zonesToMaybeCompact;
Arena* relocatedArenasToRelease;
-#ifdef JS_GC_ZEAL
- MarkingValidator* markingValidator;
-#endif
-
/*
* Indicates that a GC slice has taken place in the middle of an animation
* frame, rather than at the beginning. In this case, the next slice will be
@@ -1274,40 +1237,6 @@ class GCRuntime
bool poked;
- /*
- * These options control the zealousness of the GC. At every allocation,
- * nextScheduled is decremented. When it reaches zero we do a full GC.
- *
- * At this point, if zeal_ is one of the types that trigger periodic
- * collection, then nextScheduled is reset to the value of zealFrequency.
- * Otherwise, no additional GCs take place.
- *
- * You can control these values in several ways:
- * - Set the JS_GC_ZEAL environment variable
- * - Call gczeal() or schedulegc() from inside shell-executed JS code
- * (see the help for details)
- *
- * If gcZeal_ == 1 then we perform GCs in select places (during MaybeGC and
- * whenever a GC poke happens). This option is mainly useful to embedders.
- *
- * We use zeal_ == 4 to enable write barrier verification. See the comment
- * in jsgc.cpp for more information about this.
- *
- * zeal_ values from 8 to 10 periodically run different types of
- * incremental GC.
- *
- * zeal_ value 14 performs periodic shrinking collections.
- */
-#ifdef JS_GC_ZEAL
- uint32_t zealModeBits;
- int zealFrequency;
- int nextScheduled;
- bool deterministicOnly;
- int incrementalLimit;
-
- Vector<JSObject*, 0, SystemAllocPolicy> selectedForMarking;
-#endif
-
bool fullCompartmentChecks;
Callback<JSGCCallback> gcCallback;
@@ -1415,51 +1344,6 @@ class MOZ_RAII AutoMaybeStartBackgroundAllocation
}
};
-#ifdef JS_GC_ZEAL
-
-inline bool
-GCRuntime::hasZealMode(ZealMode mode)
-{
- static_assert(size_t(ZealMode::Limit) < sizeof(zealModeBits) * 8,
- "Zeal modes must fit in zealModeBits");
- return zealModeBits & (1 << uint32_t(mode));
-}
-
-inline void
-GCRuntime::clearZealMode(ZealMode mode)
-{
- zealModeBits &= ~(1 << uint32_t(mode));
- MOZ_ASSERT(!hasZealMode(mode));
-}
-
-inline bool
-GCRuntime::upcomingZealousGC() {
- return nextScheduled == 1;
-}
-
-inline bool
-GCRuntime::needZealousGC() {
- if (nextScheduled > 0 && --nextScheduled == 0) {
- if (hasZealMode(ZealMode::Alloc) ||
- hasZealMode(ZealMode::GenerationalGC) ||
- hasZealMode(ZealMode::IncrementalRootsThenFinish) ||
- hasZealMode(ZealMode::IncrementalMarkAllThenFinish) ||
- hasZealMode(ZealMode::IncrementalMultipleSlices) ||
- hasZealMode(ZealMode::Compact))
- {
- nextScheduled = zealFrequency;
- }
- return true;
- }
- return false;
-}
-#else
-inline bool GCRuntime::hasZealMode(ZealMode mode) { return false; }
-inline void GCRuntime::clearZealMode(ZealMode mode) { }
-inline bool GCRuntime::upcomingZealousGC() { return false; }
-inline bool GCRuntime::needZealousGC() { return false; }
-#endif
-
} /* namespace gc */
} /* namespace js */
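
The inlines removed at the bottom of this file implement the scheduling described in the deleted comment block: a bitmask of active zeal modes plus a per-allocation countdown that, for the periodic modes, re-arms itself from zealFrequency. A compact sketch of that bookkeeping, using an illustrative subset of modes rather than the real ZealMode enum:

    #include <cstdint>

    // Illustrative subset; the real ZealMode enum has more entries.
    enum class Mode : uint32_t { Alloc = 0, GenerationalGC = 1, Compact = 2, Limit = 3 };

    struct ZealSchedule {
        uint32_t modeBits = 0;       // one bit per active mode
        int frequency = 100;         // reload value for periodic modes
        int nextScheduled = 0;       // countdown; 0 means nothing scheduled

        static_assert(uint32_t(Mode::Limit) < 32, "modes must fit in modeBits");

        bool hasMode(Mode m) const { return modeBits & (1u << uint32_t(m)); }
        void setMode(Mode m)       { modeBits |= (1u << uint32_t(m)); }
        void clearMode(Mode m)     { modeBits &= ~(1u << uint32_t(m)); }

        // Same shape as the removed needZealousGC(): fire when the countdown
        // hits zero, and re-arm only for modes that collect periodically.
        bool needGC() {
            if (nextScheduled > 0 && --nextScheduled == 0) {
                if (hasMode(Mode::Alloc) || hasMode(Mode::GenerationalGC) || hasMode(Mode::Compact))
                    nextScheduled = frequency;
                return true;
            }
            return false;
        }
    };
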
diff --git a/js/src/gc/Heap.h b/js/src/gc/Heap.h
index 2a9390e91..697803380 100644
--- a/js/src/gc/Heap.h
+++ b/js/src/gc/Heap.h
@@ -1322,22 +1322,6 @@ TenuredCell::writeBarrierPre(TenuredCell* thing)
if (!thing)
return;
-#ifdef JS_GC_ZEAL
- // When verifying pre barriers we need to switch on all barriers, even
- // those on the Atoms Zone. Normally, we never enter a parse task when
- // collecting in the atoms zone, so will filter out atoms below.
- // Unfortuantely, If we try that when verifying pre-barriers, we'd never be
- // able to handle OMT parse tasks at all as we switch on the verifier any
- // time we're not doing GC. This would cause us to deadlock, as OMT parsing
- // is meant to resume after GC work completes. Instead we filter out any
- // OMT barriers that reach us and assert that they would normally not be
- // possible.
- if (!CurrentThreadCanAccessRuntime(thing->runtimeFromAnyThread())) {
- AssertSafeToSkipBarrier(thing);
- return;
- }
-#endif
-
JS::shadow::Zone* shadowZone = thing->shadowZoneFromAnyThread();
if (shadowZone->needsIncrementalBarrier()) {
MOZ_ASSERT(!RuntimeFromMainThreadIsHeapMajorCollecting(shadowZone));
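
After this change, TenuredCell::writeBarrierPre is just the standard snapshot-at-the-beginning pre-barrier: before a reference is overwritten, the old target is handed to the marker if its zone currently needs incremental barriers. A generic sketch of that idea, with placeholder Cell/Zone types rather than the SpiderMonkey ones:

    #include <vector>

    struct Cell {};                              // placeholder GC thing

    struct Zone {
        bool needsIncrementalBarrier = false;
        std::vector<Cell*> markStack;            // stand-in for the incremental marker
    };

    // Pre-write barrier: record the value being overwritten so an in-progress
    // incremental mark still sees the object graph as of its starting snapshot.
    inline void writeBarrierPre(Zone& zone, Cell* oldValue) {
        if (oldValue && zone.needsIncrementalBarrier)
            zone.markStack.push_back(oldValue);
    }

    // Callers invoke the barrier with the old value before storing the new one.
    inline void storeRef(Zone& zone, Cell*& slot, Cell* newValue) {
        writeBarrierPre(zone, slot);
        slot = newValue;
    }
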
diff --git a/js/src/gc/Nursery.cpp b/js/src/gc/Nursery.cpp
index 2c402fe0b..bce2b74aa 100644
--- a/js/src/gc/Nursery.cpp
+++ b/js/src/gc/Nursery.cpp
@@ -74,14 +74,6 @@ struct js::Nursery::SweepAction
#endif
};
-#ifdef JS_GC_ZEAL
-struct js::Nursery::Canary
-{
- uintptr_t magicValue;
- Canary* next;
-};
-#endif
-
inline void
js::Nursery::NurseryChunk::poisonAndInit(JSRuntime* rt, uint8_t poison)
{
@@ -124,9 +116,6 @@ js::Nursery::Nursery(JSRuntime* rt)
, minorGcCount_(0)
, freeMallocedBuffersTask(nullptr)
, sweepActions_(nullptr)
-#ifdef JS_GC_ZEAL
- , lastCanary_(nullptr)
-#endif
{}
bool
@@ -199,7 +188,6 @@ void
js::Nursery::enable()
{
MOZ_ASSERT(isEmpty());
- MOZ_ASSERT(!runtime()->gc.isVerifyPreBarriersEnabled());
if (isEnabled())
return;
@@ -209,10 +197,6 @@ js::Nursery::enable()
setCurrentChunk(0);
setStartPosition();
-#ifdef JS_GC_ZEAL
- if (runtime()->hasZealMode(ZealMode::GenerationalGC))
- enterZealMode();
-#endif
MOZ_ALWAYS_TRUE(runtime()->gc.storeBuffer.enable());
return;
@@ -235,31 +219,11 @@ js::Nursery::isEmpty() const
MOZ_ASSERT(runtime_);
if (!isEnabled())
return true;
-
- if (!runtime_->hasZealMode(ZealMode::GenerationalGC)) {
- MOZ_ASSERT(currentStartChunk_ == 0);
- MOZ_ASSERT(currentStartPosition_ == chunk(0).start());
- }
+ MOZ_ASSERT(currentStartChunk_ == 0);
+ MOZ_ASSERT(currentStartPosition_ == chunk(0).start());
return position() == currentStartPosition_;
}
-#ifdef JS_GC_ZEAL
-void
-js::Nursery::enterZealMode() {
- if (isEnabled())
- updateNumChunks(maxNurseryChunks_);
-}
-
-void
-js::Nursery::leaveZealMode() {
- if (isEnabled()) {
- MOZ_ASSERT(isEmpty());
- setCurrentChunk(0);
- setStartPosition();
- }
-}
-#endif // JS_GC_ZEAL
-
JSObject*
js::Nursery::allocateObject(JSContext* cx, size_t size, size_t numDynamic, const js::Class* clasp)
{
@@ -305,12 +269,6 @@ js::Nursery::allocate(size_t size)
MOZ_ASSERT(position() % gc::CellSize == 0);
MOZ_ASSERT(size % gc::CellSize == 0);
-#ifdef JS_GC_ZEAL
- static const size_t CanarySize = (sizeof(Nursery::Canary) + CellSize - 1) & ~CellMask;
- if (runtime()->gc.hasZealMode(ZealMode::CheckNursery))
- size += CanarySize;
-#endif
-
if (currentEnd() < position() + size) {
if (currentChunk_ + 1 == numChunks())
return nullptr;
@@ -322,19 +280,6 @@ js::Nursery::allocate(size_t size)
JS_EXTRA_POISON(thing, JS_ALLOCATED_NURSERY_PATTERN, size);
-#ifdef JS_GC_ZEAL
- if (runtime()->gc.hasZealMode(ZealMode::CheckNursery)) {
- auto canary = reinterpret_cast<Canary*>(position() - CanarySize);
- canary->magicValue = CanaryMagicValue;
- canary->next = nullptr;
- if (lastCanary_) {
- MOZ_ASSERT(!lastCanary_->next);
- lastCanary_->next = canary;
- }
- lastCanary_ = canary;
- }
-#endif
-
MemProfiler::SampleNursery(reinterpret_cast<void*>(thing), size);
return thing;
}
@@ -561,14 +506,6 @@ js::Nursery::collect(JSRuntime* rt, JS::gcreason::Reason reason)
rt->gc.incMinorGcNumber();
-#ifdef JS_GC_ZEAL
- if (rt->gc.hasZealMode(ZealMode::CheckNursery)) {
- for (auto canary = lastCanary_; canary; canary = canary->next)
- MOZ_ASSERT(canary->magicValue == CanaryMagicValue);
- }
- lastCanary_ = nullptr;
-#endif
-
rt->gc.stats.beginNurseryCollection(reason);
TraceMinorGCStart();
@@ -659,7 +596,6 @@ js::Nursery::doCollection(JSRuntime* rt, JS::gcreason::Reason reason,
{
AutoTraceSession session(rt, JS::HeapState::MinorCollecting);
AutoSetThreadIsPerformingGC performingGC;
- AutoStopVerifyingBarriers av(rt, false);
AutoDisableProxyCheck disableStrictProxyChecking(rt);
mozilla::DebugOnly<AutoEnterOOMUnsafeRegion> oomUnsafeRegion;
@@ -752,10 +688,6 @@ js::Nursery::doCollection(JSRuntime* rt, JS::gcreason::Reason reason,
// Make sure hashtables have been updated after the collection.
maybeStartProfile(ProfileKey::CheckHashTables);
-#ifdef JS_GC_ZEAL
- if (rt->hasZealMode(ZealMode::CheckHashTablesOnMinorGC))
- CheckHashTablesAfterMovingGC(rt);
-#endif
maybeEndProfile(ProfileKey::CheckHashTables);
// Calculate and return the promotion rate.
@@ -828,17 +760,6 @@ js::Nursery::sweep()
runSweepActions();
sweepDictionaryModeObjects();
-#ifdef JS_GC_ZEAL
- /* Poison the nursery contents so touching a freed object will crash. */
- for (unsigned i = 0; i < numChunks(); i++)
- chunk(i).poisonAndInit(runtime(), JS_SWEPT_NURSERY_PATTERN);
-
- if (runtime()->hasZealMode(ZealMode::GenerationalGC)) {
- /* Only reset the alloc point when we are close to the end. */
- if (currentChunk_ + 1 == numChunks())
- setCurrentChunk(0);
- } else
-#endif
{
#ifdef JS_CRASH_DIAGNOSTICS
for (unsigned i = 0; i < numChunks(); ++i)
@@ -916,20 +837,12 @@ js::Nursery::growAllocableSpace()
void
js::Nursery::shrinkAllocableSpace()
{
-#ifdef JS_GC_ZEAL
- if (runtime()->hasZealMode(ZealMode::GenerationalGC))
- return;
-#endif
updateNumChunks(Max(numChunks() - 1, 1u));
}
void
js::Nursery::minimizeAllocableSpace()
{
-#ifdef JS_GC_ZEAL
- if (runtime()->hasZealMode(ZealMode::GenerationalGC))
- return;
-#endif
updateNumChunks(1);
}
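
Most of what is removed here is the CheckNursery canary machinery: each bump allocation reserved extra space for a Canary stamped with a magic value and chained to the previous one, and the next minor GC walked the chain asserting every magic value was intact, catching writes that ran past the end of an allocation. A self-contained sketch of the same idea (the magic value, chunk size, and the walk from the oldest canary are illustrative choices, not the exact removed code):

    #include <cassert>
    #include <cstddef>
    #include <cstdint>
    #include <new>
    #include <vector>

    constexpr uintptr_t CanaryMagic = 0xDEADBEEF;

    struct Canary {
        uintptr_t magic;
        Canary* next;
    };

    struct BumpNursery {
        std::vector<std::byte> heap = std::vector<std::byte>(64 * 1024);
        size_t pos = 0;
        Canary* firstCanary = nullptr;
        Canary* lastCanary = nullptr;

        // Allocate |size| bytes and place a canary immediately after the payload.
        void* allocate(size_t size) {
            // Round the payload up so the trailing canary is suitably aligned.
            size_t payload = (size + alignof(Canary) - 1) & ~(alignof(Canary) - 1);
            if (pos + payload + sizeof(Canary) > heap.size())
                return nullptr;                                  // nursery is full
            void* thing = heap.data() + pos;
            auto* canary = new (heap.data() + pos + payload) Canary{CanaryMagic, nullptr};
            pos += payload + sizeof(Canary);
            if (lastCanary)
                lastCanary->next = canary;
            else
                firstCanary = canary;
            lastCanary = canary;
            return thing;
        }

        // At minor-GC time, verify no allocation scribbled over its canary.
        void checkCanaries() const {
            for (const Canary* c = firstCanary; c; c = c->next)
                assert(c->magic == CanaryMagic);
        }
    };
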
diff --git a/js/src/gc/Nursery.h b/js/src/gc/Nursery.h
index 69fb66b7a..0d215d997 100644
--- a/js/src/gc/Nursery.h
+++ b/js/src/gc/Nursery.h
@@ -58,8 +58,6 @@ class NativeObject;
class Nursery;
class HeapSlot;
-void SetGCZeal(JSRuntime*, uint8_t, uint32_t);
-
namespace gc {
class AutoMaybeStartBackgroundAllocation;
struct Cell;
@@ -252,11 +250,6 @@ class Nursery
(numChunks() - currentChunk_ - 1) * NurseryChunkUsableSize;
}
-#ifdef JS_GC_ZEAL
- void enterZealMode();
- void leaveZealMode();
-#endif
-
/* Print total profile times on shutdown. */
void printTotalProfileTimes();
@@ -374,11 +367,6 @@ class Nursery
using NativeObjectVector = Vector<NativeObject*, 0, SystemAllocPolicy>;
NativeObjectVector dictionaryModeObjects_;
-#ifdef JS_GC_ZEAL
- struct Canary;
- Canary* lastCanary_;
-#endif
-
NurseryChunk* allocChunk();
NurseryChunk& chunk(unsigned index) const {
diff --git a/js/src/gc/Verifier.cpp b/js/src/gc/Verifier.cpp
index dd4031606..3ebbbb4f6 100644
--- a/js/src/gc/Verifier.cpp
+++ b/js/src/gc/Verifier.cpp
@@ -26,420 +26,6 @@
using namespace js;
using namespace js::gc;
-#ifdef JS_GC_ZEAL
-
-/*
- * Write barrier verification
- *
- * The next few functions are for write barrier verification.
- *
- * The VerifyBarriers function is a shorthand. It checks if a verification phase
- * is currently running. If not, it starts one. Otherwise, it ends the current
- * phase and starts a new one.
- *
- * The user can adjust the frequency of verifications, which causes
- * VerifyBarriers to be a no-op all but one out of N calls. However, if the
- * |always| parameter is true, it starts a new phase no matter what.
- *
- * Pre-Barrier Verifier:
- * When StartVerifyBarriers is called, a snapshot is taken of all objects in
- * the GC heap and saved in an explicit graph data structure. Later,
- * EndVerifyBarriers traverses the heap again. Any pointer values that were in
- * the snapshot and are no longer found must be marked; otherwise an assertion
- * triggers. Note that we must not GC in between starting and finishing a
- * verification phase.
- */
-
-struct EdgeValue
-{
- void* thing;
- JS::TraceKind kind;
- const char* label;
-};
-
-struct VerifyNode
-{
- void* thing;
- JS::TraceKind kind;
- uint32_t count;
- EdgeValue edges[1];
-};
-
-typedef HashMap<void*, VerifyNode*, DefaultHasher<void*>, SystemAllocPolicy> NodeMap;
-
-/*
- * The verifier data structures are simple. The entire graph is stored in a
- * single block of memory. At the beginning is a VerifyNode for the root
- * node. It is followed by a sequence of EdgeValues--the exact number is given
- * in the node. After the edges come more nodes and their edges.
- *
- * The edgeptr and term fields are used to allocate out of the block of memory
- * for the graph. If we run out of memory (i.e., if edgeptr goes beyond term),
- * we just abandon the verification.
- *
- * The nodemap field is a hashtable that maps from the address of the GC thing
- * to the VerifyNode that represents it.
- */
-class js::VerifyPreTracer final : public JS::CallbackTracer
-{
- JS::AutoDisableGenerationalGC noggc;
-
- void onChild(const JS::GCCellPtr& thing) override;
-
- public:
- /* The gcNumber when the verification began. */
- uint64_t number;
-
- /* This counts up to gcZealFrequency to decide whether to verify. */
- int count;
-
- /* This graph represents the initial GC "snapshot". */
- VerifyNode* curnode;
- VerifyNode* root;
- char* edgeptr;
- char* term;
- NodeMap nodemap;
-
- explicit VerifyPreTracer(JSRuntime* rt)
- : JS::CallbackTracer(rt), noggc(rt), number(rt->gc.gcNumber()), count(0), curnode(nullptr),
- root(nullptr), edgeptr(nullptr), term(nullptr)
- {}
-
- ~VerifyPreTracer() {
- js_free(root);
- }
-};
-
-/*
- * This function builds up the heap snapshot by adding edges to the current
- * node.
- */
-void
-VerifyPreTracer::onChild(const JS::GCCellPtr& thing)
-{
- MOZ_ASSERT(!IsInsideNursery(thing.asCell()));
-
- // Skip things in other runtimes.
- if (thing.asCell()->asTenured().runtimeFromAnyThread() != runtime())
- return;
-
- edgeptr += sizeof(EdgeValue);
- if (edgeptr >= term) {
- edgeptr = term;
- return;
- }
-
- VerifyNode* node = curnode;
- uint32_t i = node->count;
-
- node->edges[i].thing = thing.asCell();
- node->edges[i].kind = thing.kind();
- node->edges[i].label = contextName();
- node->count++;
-}
-
-static VerifyNode*
-MakeNode(VerifyPreTracer* trc, void* thing, JS::TraceKind kind)
-{
- NodeMap::AddPtr p = trc->nodemap.lookupForAdd(thing);
- if (!p) {
- VerifyNode* node = (VerifyNode*)trc->edgeptr;
- trc->edgeptr += sizeof(VerifyNode) - sizeof(EdgeValue);
- if (trc->edgeptr >= trc->term) {
- trc->edgeptr = trc->term;
- return nullptr;
- }
-
- node->thing = thing;
- node->count = 0;
- node->kind = kind;
- if (!trc->nodemap.add(p, thing, node)) {
- trc->edgeptr = trc->term;
- return nullptr;
- }
-
- return node;
- }
- return nullptr;
-}
-
-static VerifyNode*
-NextNode(VerifyNode* node)
-{
- if (node->count == 0)
- return (VerifyNode*)((char*)node + sizeof(VerifyNode) - sizeof(EdgeValue));
- else
- return (VerifyNode*)((char*)node + sizeof(VerifyNode) +
- sizeof(EdgeValue)*(node->count - 1));
-}
-
-void
-gc::GCRuntime::startVerifyPreBarriers()
-{
- if (verifyPreData || isIncrementalGCInProgress())
- return;
-
- if (IsIncrementalGCUnsafe(rt) != AbortReason::None)
- return;
-
- number++;
-
- VerifyPreTracer* trc = js_new<VerifyPreTracer>(rt);
- if (!trc)
- return;
-
- AutoPrepareForTracing prep(rt->contextFromMainThread(), WithAtoms);
-
- for (auto chunk = allNonEmptyChunks(); !chunk.done(); chunk.next())
- chunk->bitmap.clear();
-
- gcstats::AutoPhase ap(stats, gcstats::PHASE_TRACE_HEAP);
-
- const size_t size = 64 * 1024 * 1024;
- trc->root = (VerifyNode*)js_malloc(size);
- if (!trc->root)
- goto oom;
- trc->edgeptr = (char*)trc->root;
- trc->term = trc->edgeptr + size;
-
- if (!trc->nodemap.init())
- goto oom;
-
- /* Create the root node. */
- trc->curnode = MakeNode(trc, nullptr, JS::TraceKind(0));
-
- incrementalState = State::MarkRoots;
-
- /* Make all the roots be edges emanating from the root node. */
- traceRuntime(trc, prep.session().lock);
-
- VerifyNode* node;
- node = trc->curnode;
- if (trc->edgeptr == trc->term)
- goto oom;
-
- /* For each edge, make a node for it if one doesn't already exist. */
- while ((char*)node < trc->edgeptr) {
- for (uint32_t i = 0; i < node->count; i++) {
- EdgeValue& e = node->edges[i];
- VerifyNode* child = MakeNode(trc, e.thing, e.kind);
- if (child) {
- trc->curnode = child;
- js::TraceChildren(trc, e.thing, e.kind);
- }
- if (trc->edgeptr == trc->term)
- goto oom;
- }
-
- node = NextNode(node);
- }
-
- verifyPreData = trc;
- incrementalState = State::Mark;
- marker.start();
-
- for (ZonesIter zone(rt, WithAtoms); !zone.done(); zone.next()) {
- PurgeJITCaches(zone);
- if (!zone->usedByExclusiveThread) {
- zone->setNeedsIncrementalBarrier(true, Zone::UpdateJit);
- zone->arenas.purge();
- }
- }
-
- return;
-
-oom:
- incrementalState = State::NotActive;
- js_delete(trc);
- verifyPreData = nullptr;
-}
-
-static bool
-IsMarkedOrAllocated(TenuredCell* cell)
-{
- return cell->isMarked() || cell->arena()->allocatedDuringIncremental;
-}
-
-struct CheckEdgeTracer : public JS::CallbackTracer {
- VerifyNode* node;
- explicit CheckEdgeTracer(JSRuntime* rt) : JS::CallbackTracer(rt), node(nullptr) {}
- void onChild(const JS::GCCellPtr& thing) override;
-};
-
-static const uint32_t MAX_VERIFIER_EDGES = 1000;
-
-/*
- * This function is called by EndVerifyBarriers for every heap edge. If the edge
- * already existed in the original snapshot, we "cancel it out" by overwriting
- * it with nullptr. EndVerifyBarriers later asserts that the remaining
- * non-nullptr edges (i.e., the ones from the original snapshot that must have
- * been modified) must point to marked objects.
- */
-void
-CheckEdgeTracer::onChild(const JS::GCCellPtr& thing)
-{
- // Skip things in other runtimes.
- if (thing.asCell()->asTenured().runtimeFromAnyThread() != runtime())
- return;
-
- /* Avoid n^2 behavior. */
- if (node->count > MAX_VERIFIER_EDGES)
- return;
-
- for (uint32_t i = 0; i < node->count; i++) {
- if (node->edges[i].thing == thing.asCell()) {
- MOZ_ASSERT(node->edges[i].kind == thing.kind());
- node->edges[i].thing = nullptr;
- return;
- }
- }
-}
-
-void
-js::gc::AssertSafeToSkipBarrier(TenuredCell* thing)
-{
- Zone* zone = thing->zoneFromAnyThread();
- MOZ_ASSERT(!zone->needsIncrementalBarrier() || zone->isAtomsZone());
-}
-
-static bool
-IsMarkedOrAllocated(const EdgeValue& edge)
-{
- if (!edge.thing || IsMarkedOrAllocated(TenuredCell::fromPointer(edge.thing)))
- return true;
-
- // Permanent atoms and well-known symbols aren't marked during graph traversal.
- if (edge.kind == JS::TraceKind::String && static_cast<JSString*>(edge.thing)->isPermanentAtom())
- return true;
- if (edge.kind == JS::TraceKind::Symbol && static_cast<JS::Symbol*>(edge.thing)->isWellKnownSymbol())
- return true;
-
- return false;
-}
-
-void
-gc::GCRuntime::endVerifyPreBarriers()
-{
- VerifyPreTracer* trc = verifyPreData;
-
- if (!trc)
- return;
-
- MOZ_ASSERT(!JS::IsGenerationalGCEnabled(rt));
-
- AutoPrepareForTracing prep(rt->contextFromMainThread(), SkipAtoms);
-
- bool compartmentCreated = false;
-
- /* We need to disable barriers before tracing, which may invoke barriers. */
- for (ZonesIter zone(rt, WithAtoms); !zone.done(); zone.next()) {
- if (!zone->needsIncrementalBarrier())
- compartmentCreated = true;
-
- zone->setNeedsIncrementalBarrier(false, Zone::UpdateJit);
- PurgeJITCaches(zone);
- }
-
- /*
- * We need to bump gcNumber so that the methodjit knows that jitcode has
- * been discarded.
- */
- MOZ_ASSERT(trc->number == number);
- number++;
-
- verifyPreData = nullptr;
- incrementalState = State::NotActive;
-
- if (!compartmentCreated && IsIncrementalGCUnsafe(rt) == AbortReason::None) {
- CheckEdgeTracer cetrc(rt);
-
- /* Start after the roots. */
- VerifyNode* node = NextNode(trc->root);
- while ((char*)node < trc->edgeptr) {
- cetrc.node = node;
- js::TraceChildren(&cetrc, node->thing, node->kind);
-
- if (node->count <= MAX_VERIFIER_EDGES) {
- for (uint32_t i = 0; i < node->count; i++) {
- EdgeValue& edge = node->edges[i];
- if (!IsMarkedOrAllocated(edge)) {
- char msgbuf[1024];
- SprintfLiteral(msgbuf,
- "[barrier verifier] Unmarked edge: %s %p '%s' edge to %s %p",
- JS::GCTraceKindToAscii(node->kind), node->thing,
- edge.label,
- JS::GCTraceKindToAscii(edge.kind), edge.thing);
- MOZ_ReportAssertionFailure(msgbuf, __FILE__, __LINE__);
- MOZ_CRASH();
- }
- }
- }
-
- node = NextNode(node);
- }
- }
-
- marker.reset();
- marker.stop();
-
- js_delete(trc);
-}
-
-/*** Barrier Verifier Scheduling ***/
-
-void
-gc::GCRuntime::verifyPreBarriers()
-{
- if (verifyPreData)
- endVerifyPreBarriers();
- else
- startVerifyPreBarriers();
-}
-
-void
-gc::VerifyBarriers(JSRuntime* rt, VerifierType type)
-{
- if (type == PreBarrierVerifier)
- rt->gc.verifyPreBarriers();
-}
-
-void
-gc::GCRuntime::maybeVerifyPreBarriers(bool always)
-{
- if (!hasZealMode(ZealMode::VerifierPre))
- return;
-
- if (rt->mainThread.suppressGC)
- return;
-
- if (verifyPreData) {
- if (++verifyPreData->count < zealFrequency && !always)
- return;
-
- endVerifyPreBarriers();
- }
-
- startVerifyPreBarriers();
-}
-
-void
-js::gc::MaybeVerifyBarriers(JSContext* cx, bool always)
-{
- GCRuntime* gc = &cx->runtime()->gc;
- gc->maybeVerifyPreBarriers(always);
-}
-
-void
-js::gc::GCRuntime::finishVerifier()
-{
- if (verifyPreData) {
- js_delete(verifyPreData);
- verifyPreData = nullptr;
- }
-}
-
-#endif /* JS_GC_ZEAL */
-
#ifdef JSGC_HASH_TABLE_CHECKS
class CheckHeapTracer : public JS::CallbackTracer
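
The pre-barrier verifier deleted above boils down to a two-pass check: startVerifyPreBarriers snapshots every edge in the heap into a flat graph and switches incremental barriers on, then endVerifyPreBarriers re-traces the heap, cancels each snapshot edge it still finds, and requires every remaining (i.e. since-overwritten) edge to point at a marked cell, which proves the pre-barrier fired for it. A much-simplified sketch of that cancel-and-check step over a plain edge list, rather than the block-allocated node/edge graph the removed code builds:

    #include <algorithm>
    #include <cassert>
    #include <utility>
    #include <vector>

    struct Cell { bool marked = false; };
    using Edge = std::pair<const Cell*, const Cell*>;        // (source, target)

    struct PreBarrierVerifier {
        std::vector<Edge> snapshot;                          // edges recorded at start

        void start(const std::vector<Edge>& heapEdges) {
            snapshot = heapEdges;                            // take the heap snapshot
        }

        // Any snapshot edge that no longer exists must have been overwritten,
        // so the pre-barrier should have marked its old target.
        void end(const std::vector<Edge>& heapEdges) const {
            for (const Edge& e : snapshot) {
                bool stillPresent =
                    std::find(heapEdges.begin(), heapEdges.end(), e) != heapEdges.end();
                if (stillPresent)
                    continue;                                // unchanged edge: cancel it out
                assert(e.second->marked && "unmarked edge: a pre-barrier was skipped");
            }
        }
    };

The removed implementation adds the practical details on top of this: a 64 MiB block for the snapshot graph, a node hashtable, an edge cap (MAX_VERIFIER_EDGES) to avoid quadratic behaviour, and special-casing for permanent atoms and well-known symbols.
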
diff --git a/js/src/gc/Zone.h b/js/src/gc/Zone.h
index a3a6dc07f..50d06319d 100644
--- a/js/src/gc/Zone.h
+++ b/js/src/gc/Zone.h
@@ -253,12 +253,6 @@ struct Zone : public JS::shadow::Zone,
// possibly at other times too.
uint64_t gcNumber();
- bool compileBarriers() const { return compileBarriers(needsIncrementalBarrier()); }
- bool compileBarriers(bool needsIncrementalBarrier) const {
- return needsIncrementalBarrier ||
- runtimeFromMainThread()->hasZealMode(js::gc::ZealMode::VerifierPre);
- }
-
enum ShouldUpdateJit { DontUpdateJit, UpdateJit };
void setNeedsIncrementalBarrier(bool needs, ShouldUpdateJit updateJit);
const bool* addressOfNeedsIncrementalBarrier() const { return &needsIncrementalBarrier_; }
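
The removed Zone::compileBarriers() helpers encoded the two reasons JIT code had to emit barrier instructions: an incremental barrier currently being required, or the pre-barrier verifier zeal mode being active. With zeal gone, only the first reason survives. A tiny illustrative sketch of the before/after predicate (ZoneSketch is a stand-in, not the real class):

    struct ZoneSketch {
        bool needsIncrementalBarrier = false;
        bool verifierPreZealActive = false;      // the condition this patch removes

        bool compileBarriersBefore() const {
            return needsIncrementalBarrier || verifierPreZealActive;
        }
        bool compileBarriersAfter() const {
            return needsIncrementalBarrier;      // the only remaining reason
        }
    };
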