Diffstat (limited to 'js/src/gc')
-rw-r--r--  js/src/gc/Barrier.h          27
-rw-r--r--  js/src/gc/GCInternals.h       5
-rw-r--r--  js/src/gc/GCRuntime.h         4
-rw-r--r--  js/src/gc/Marking.cpp        85
-rw-r--r--  js/src/gc/Memory.cpp        102
-rw-r--r--  js/src/gc/Nursery.cpp         9
-rw-r--r--  js/src/gc/Nursery.h           1
-rw-r--r--  js/src/gc/RootMarking.cpp     3
-rw-r--r--  js/src/gc/Statistics.cpp    219
-rw-r--r--  js/src/gc/Statistics.h       18
-rw-r--r--  js/src/gc/Tracer.cpp         69
-rw-r--r--  js/src/gc/Zone.cpp           15
-rw-r--r--  js/src/gc/Zone.h             11
13 files changed, 141 insertions, 427 deletions
diff --git a/js/src/gc/Barrier.h b/js/src/gc/Barrier.h
index effc9233e..dce3b2a20 100644
--- a/js/src/gc/Barrier.h
+++ b/js/src/gc/Barrier.h
@@ -667,29 +667,15 @@ class HeapSlot : public WriteBarrieredBase<Value>
Element = 1
};
- explicit HeapSlot() = delete;
-
- explicit HeapSlot(NativeObject* obj, Kind kind, uint32_t slot, const Value& v)
- : WriteBarrieredBase<Value>(v)
- {
- post(obj, kind, slot, v);
- }
-
- explicit HeapSlot(NativeObject* obj, Kind kind, uint32_t slot, const HeapSlot& s)
- : WriteBarrieredBase<Value>(s.value)
- {
- post(obj, kind, slot, s);
- }
-
- ~HeapSlot() {
- pre();
- }
-
void init(NativeObject* owner, Kind kind, uint32_t slot, const Value& v) {
value = v;
post(owner, kind, slot, v);
}
+ void destroy() {
+ pre();
+ }
+
#ifdef DEBUG
bool preconditionForSet(NativeObject* owner, Kind kind, uint32_t slot) const;
bool preconditionForWriteBarrierPost(NativeObject* obj, Kind kind, uint32_t slot,
@@ -703,11 +689,6 @@ class HeapSlot : public WriteBarrieredBase<Value>
post(owner, kind, slot, v);
}
- /* For users who need to manually barrier the raw types. */
- static void writeBarrierPost(NativeObject* owner, Kind kind, uint32_t slot, const Value& target) {
- reinterpret_cast<HeapSlot*>(const_cast<Value*>(&target))->post(owner, kind, slot, target);
- }
-
private:
void post(NativeObject* owner, Kind kind, uint32_t slot, const Value& target) {
MOZ_ASSERT(preconditionForWriteBarrierPost(owner, kind, slot, target));
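[Note on the HeapSlot change above: with the destructor removed, the pre-barrier no longer fires implicitly when slots are torn down; owners must now call destroy() explicitly before releasing slot storage. A minimal sketch of the expected pattern, assuming tree types and a hypothetical discardSlots() helper rather than actual tree code:

    // Hypothetical helper, for illustration only: each slot's pre-barrier
    // must be fired explicitly before the storage is freed.
    void discardSlots(HeapSlot* slots, uint32_t count) {
        for (uint32_t i = 0; i < count; i++)
            slots[i].destroy();  // runs pre(); formerly done by ~HeapSlot()
        js_free(slots);
    }
]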
diff --git a/js/src/gc/GCInternals.h b/js/src/gc/GCInternals.h
index 4919b87a5..e8df0bb70 100644
--- a/js/src/gc/GCInternals.h
+++ b/js/src/gc/GCInternals.h
@@ -9,7 +9,6 @@
#include "mozilla/ArrayUtils.h"
#include "mozilla/Maybe.h"
-#include "mozilla/PodOperations.h"
#include "jscntxt.h"
@@ -102,9 +101,9 @@ struct TenureCountCache
static const size_t EntryShift = 4;
static const size_t EntryCount = 1 << EntryShift;
- TenureCount entries[EntryCount];
+ TenureCount entries[EntryCount] = {}; // zeroes
- TenureCountCache() { mozilla::PodZero(this); }
+ TenureCountCache() = default;
HashNumber hash(ObjectGroup* group) {
#if JS_BITS_PER_WORD == 32
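[The TenureCountCache change above swaps mozilla::PodZero for an in-class array initializer. A standalone sketch of the equivalence, with a hypothetical type:

    #include <cstdint>

    struct CountsLike {
        uint64_t entries[16] = {};  // value-initialized: every element is zero
        CountsLike() = default;     // defaulted ctor keeps the zeroing
    };
]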
diff --git a/js/src/gc/GCRuntime.h b/js/src/gc/GCRuntime.h
index 5c2576efd..f102e9ef0 100644
--- a/js/src/gc/GCRuntime.h
+++ b/js/src/gc/GCRuntime.h
@@ -900,7 +900,8 @@ class GCRuntime
void requestMajorGC(JS::gcreason::Reason reason);
SliceBudget defaultBudget(JS::gcreason::Reason reason, int64_t millis);
- void budgetIncrementalGC(SliceBudget& budget, AutoLockForExclusiveAccess& lock);
+ void budgetIncrementalGC(JS::gcreason::Reason reason, SliceBudget& budget,
+ AutoLockForExclusiveAccess& lock);
void resetIncrementalGC(AbortReason reason, AutoLockForExclusiveAccess& lock);
// Assert if the system state is such that we should never
@@ -915,6 +916,7 @@ class GCRuntime
void collect(bool nonincrementalByAPI, SliceBudget budget, JS::gcreason::Reason reason) JS_HAZ_GC_CALL;
MOZ_MUST_USE bool gcCycle(bool nonincrementalByAPI, SliceBudget& budget,
JS::gcreason::Reason reason);
+ bool shouldRepeatForDeadZone(JS::gcreason::Reason reason);
void incrementalCollectSlice(SliceBudget& budget, JS::gcreason::Reason reason,
AutoLockForExclusiveAccess& lock);
diff --git a/js/src/gc/Marking.cpp b/js/src/gc/Marking.cpp
index b2c105999..262fc8cbc 100644
--- a/js/src/gc/Marking.cpp
+++ b/js/src/gc/Marking.cpp
@@ -18,6 +18,7 @@
#include "builtin/ModuleObject.h"
#include "gc/GCInternals.h"
#include "gc/Policy.h"
+#include "gc/StoreBuffer-inl.h"
#include "jit/IonCode.h"
#include "js/SliceBudget.h"
#include "vm/ArgumentsObject.h"
@@ -28,7 +29,6 @@
#include "vm/Shape.h"
#include "vm/Symbol.h"
#include "vm/TypedArrayObject.h"
-#include "vm/UnboxedObject.h"
#include "wasm/WasmJS.h"
#include "jscompartmentinlines.h"
@@ -37,7 +37,6 @@
#include "gc/Nursery-inl.h"
#include "vm/String-inl.h"
-#include "vm/UnboxedObject-inl.h"
using namespace js;
using namespace js::gc;
@@ -1231,34 +1230,34 @@ BindingIter::trace(JSTracer* trc)
void
LexicalScope::Data::trace(JSTracer* trc)
{
- TraceBindingNames(trc, names, length);
+ TraceBindingNames(trc, trailingNames.start(), length);
}
void
FunctionScope::Data::trace(JSTracer* trc)
{
TraceNullableEdge(trc, &canonicalFunction, "scope canonical function");
- TraceNullableBindingNames(trc, names, length);
+ TraceNullableBindingNames(trc, trailingNames.start(), length);
}
void
VarScope::Data::trace(JSTracer* trc)
{
- TraceBindingNames(trc, names, length);
+ TraceBindingNames(trc, trailingNames.start(), length);
}
void
GlobalScope::Data::trace(JSTracer* trc)
{
- TraceBindingNames(trc, names, length);
+ TraceBindingNames(trc, trailingNames.start(), length);
}
void
EvalScope::Data::trace(JSTracer* trc)
{
- TraceBindingNames(trc, names, length);
+ TraceBindingNames(trc, trailingNames.start(), length);
}
void
ModuleScope::Data::trace(JSTracer* trc)
{
TraceNullableEdge(trc, &module, "scope module");
- TraceBindingNames(trc, names, length);
+ TraceBindingNames(trc, trailingNames.start(), length);
}
void
Scope::traceChildren(JSTracer* trc)
@@ -1302,13 +1301,13 @@ js::GCMarker::eagerlyMarkChildren(Scope* scope)
traverseEdge(scope, static_cast<Scope*>(scope->enclosing_));
if (scope->environmentShape_)
traverseEdge(scope, static_cast<Shape*>(scope->environmentShape_));
- BindingName* names = nullptr;
+ TrailingNamesArray* names = nullptr;
uint32_t length = 0;
switch (scope->kind_) {
case ScopeKind::Function: {
FunctionScope::Data* data = reinterpret_cast<FunctionScope::Data*>(scope->data_);
traverseEdge(scope, static_cast<JSObject*>(data->canonicalFunction));
- names = data->names;
+ names = &data->trailingNames;
length = data->length;
break;
}
@@ -1316,7 +1315,7 @@ js::GCMarker::eagerlyMarkChildren(Scope* scope)
case ScopeKind::FunctionBodyVar:
case ScopeKind::ParameterExpressionVar: {
VarScope::Data* data = reinterpret_cast<VarScope::Data*>(scope->data_);
- names = data->names;
+ names = &data->trailingNames;
length = data->length;
break;
}
@@ -1327,7 +1326,7 @@ js::GCMarker::eagerlyMarkChildren(Scope* scope)
case ScopeKind::NamedLambda:
case ScopeKind::StrictNamedLambda: {
LexicalScope::Data* data = reinterpret_cast<LexicalScope::Data*>(scope->data_);
- names = data->names;
+ names = &data->trailingNames;
length = data->length;
break;
}
@@ -1335,7 +1334,7 @@ js::GCMarker::eagerlyMarkChildren(Scope* scope)
case ScopeKind::Global:
case ScopeKind::NonSyntactic: {
GlobalScope::Data* data = reinterpret_cast<GlobalScope::Data*>(scope->data_);
- names = data->names;
+ names = &data->trailingNames;
length = data->length;
break;
}
@@ -1343,7 +1342,7 @@ js::GCMarker::eagerlyMarkChildren(Scope* scope)
case ScopeKind::Eval:
case ScopeKind::StrictEval: {
EvalScope::Data* data = reinterpret_cast<EvalScope::Data*>(scope->data_);
- names = data->names;
+ names = &data->trailingNames;
length = data->length;
break;
}
@@ -1351,7 +1350,7 @@ js::GCMarker::eagerlyMarkChildren(Scope* scope)
case ScopeKind::Module: {
ModuleScope::Data* data = reinterpret_cast<ModuleScope::Data*>(scope->data_);
traverseEdge(scope, static_cast<JSObject*>(data->module));
- names = data->names;
+ names = &data->trailingNames;
length = data->length;
break;
}
@@ -1361,12 +1360,12 @@ js::GCMarker::eagerlyMarkChildren(Scope* scope)
}
if (scope->kind_ == ScopeKind::Function) {
for (uint32_t i = 0; i < length; i++) {
- if (JSAtom* name = names[i].name())
+ if (JSAtom* name = names->operator[](i).name())
traverseEdge(scope, static_cast<JSString*>(name));
}
} else {
for (uint32_t i = 0; i < length; i++)
- traverseEdge(scope, static_cast<JSString*>(names[i].name()));
+ traverseEdge(scope, static_cast<JSString*>(names->operator[](i).name()));
}
}
@@ -1395,14 +1394,6 @@ js::ObjectGroup::traceChildren(JSTracer* trc)
if (maybePreliminaryObjects())
maybePreliminaryObjects()->trace(trc);
- if (maybeUnboxedLayout())
- unboxedLayout().trace(trc);
-
- if (ObjectGroup* unboxedGroup = maybeOriginalUnboxedGroup()) {
- TraceManuallyBarrieredEdge(trc, &unboxedGroup, "group_original_unboxed_group");
- setOriginalUnboxedGroup(unboxedGroup);
- }
-
if (JSObject* descr = maybeTypeDescr()) {
TraceManuallyBarrieredEdge(trc, &descr, "group_type_descr");
setTypeDescr(&descr->as<TypeDescr>());
@@ -1436,12 +1427,6 @@ js::GCMarker::lazilyMarkChildren(ObjectGroup* group)
if (group->maybePreliminaryObjects())
group->maybePreliminaryObjects()->trace(this);
- if (group->maybeUnboxedLayout())
- group->unboxedLayout().trace(this);
-
- if (ObjectGroup* unboxedGroup = group->maybeOriginalUnboxedGroup())
- traverseEdge(group, unboxedGroup);
-
if (TypeDescr* descr = group->maybeTypeDescr())
traverseEdge(group, static_cast<JSObject*>(descr));
@@ -1484,23 +1469,6 @@ CallTraceHook(Functor f, JSTracer* trc, JSObject* obj, CheckGeneration check, Ar
return nullptr;
}
- if (clasp == &UnboxedPlainObject::class_) {
- JSObject** pexpando = obj->as<UnboxedPlainObject>().addressOfExpando();
- if (*pexpando)
- f(pexpando, mozilla::Forward<Args>(args)...);
-
- UnboxedPlainObject& unboxed = obj->as<UnboxedPlainObject>();
- const UnboxedLayout& layout = check == CheckGeneration::DoChecks
- ? unboxed.layout()
- : unboxed.layoutDontCheckGeneration();
- if (layout.traceList()) {
- VisitTraceList(f, layout.traceList(), unboxed.data(),
- mozilla::Forward<Args>(args)...);
- }
-
- return nullptr;
- }
-
clasp->doTrace(trc, obj);
if (!clasp->isNative())
@@ -2293,18 +2261,6 @@ static inline void
TraceWholeCell(TenuringTracer& mover, JSObject* object)
{
mover.traceObject(object);
-
- // Additionally trace the expando object attached to any unboxed plain
- // objects. Baseline and Ion can write properties to the expando while
- // only adding a post barrier to the owning unboxed object. Note that
- // it isn't possible for a nursery unboxed object to have a tenured
- // expando, so that adding a post barrier on the original object will
- // capture any tenured->nursery edges in the expando as well.
-
- if (object->is<UnboxedPlainObject>()) {
- if (UnboxedExpandoObject* expando = object->as<UnboxedPlainObject>().maybeExpando())
- expando->traceChildren(&mover);
- }
}
static inline void
@@ -2548,8 +2504,6 @@ js::TenuringTracer::moveObjectToTenured(JSObject* dst, JSObject* src, AllocKind
InlineTypedObject::objectMovedDuringMinorGC(this, dst, src);
} else if (src->is<TypedArrayObject>()) {
tenuredSize += TypedArrayObject::objectMovedDuringMinorGC(this, dst, src, dstKind);
- } else if (src->is<UnboxedArrayObject>()) {
- tenuredSize += UnboxedArrayObject::objectMovedDuringMinorGC(this, dst, src, dstKind);
} else if (src->is<ArgumentsObject>()) {
tenuredSize += ArgumentsObject::objectMovedDuringMinorGC(this, dst, src);
} else if (src->is<ProxyObject>()) {
@@ -2892,10 +2846,9 @@ struct UnmarkGrayTracer : public JS::CallbackTracer
*
* There is an additional complication for certain kinds of edges that are not
* contained explicitly in the source object itself, such as from a weakmap key
- * to its value, and from an object being watched by a watchpoint to the
- * watchpoint's closure. These "implicit edges" are represented in some other
- * container object, such as the weakmap or the watchpoint itself. In these
- * cases, calling unmark gray on an object won't find all of its children.
+ * to its value. These "implicit edges" are represented in some other
+ * container object, such as the weakmap itself. In these cases, calling unmark
+ * gray on an object won't find all of its children.
*
* Handling these implicit edges has two parts:
* - A special pass enumerating all of the containers that know about the
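[The reworded comment above describes implicit edges such as weakmap key-to-value links. A minimal sketch (hypothetical types, not SpiderMonkey internals) of why walking a key's own children cannot reach the value:

    #include <map>

    struct Cell { bool gray = true; };

    struct WeakMapLike {
        // The key->value edge lives in this container, not in the key itself,
        // so a traversal of the key's children never visits the value. The
        // container must be enumerated separately to unmark gray values.
        std::map<Cell*, Cell*> entries;

        void unmarkGrayValueFor(Cell* key) {
            auto it = entries.find(key);
            if (it != entries.end())
                it->second->gray = false;  // handle the implicit edge here
        }
    };
]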
diff --git a/js/src/gc/Memory.cpp b/js/src/gc/Memory.cpp
index 26da75469..418984057 100644
--- a/js/src/gc/Memory.cpp
+++ b/js/src/gc/Memory.cpp
@@ -17,11 +17,6 @@
#include "jswin.h"
#include <psapi.h>
-#elif defined(SOLARIS)
-
-#include <sys/mman.h>
-#include <unistd.h>
-
#elif defined(XP_UNIX)
#include <algorithm>
@@ -408,86 +403,6 @@ DeallocateMappedContent(void* p, size_t length)
# endif
-#elif defined(SOLARIS)
-
-#ifndef MAP_NOSYNC
-# define MAP_NOSYNC 0
-#endif
-
-void
-InitMemorySubsystem()
-{
- if (pageSize == 0)
- pageSize = allocGranularity = size_t(sysconf(_SC_PAGESIZE));
-}
-
-void*
-MapAlignedPages(size_t size, size_t alignment)
-{
- MOZ_ASSERT(size >= alignment);
- MOZ_ASSERT(size >= allocGranularity);
- MOZ_ASSERT(size % alignment == 0);
- MOZ_ASSERT(size % pageSize == 0);
- MOZ_ASSERT_IF(alignment < allocGranularity, allocGranularity % alignment == 0);
- MOZ_ASSERT_IF(alignment > allocGranularity, alignment % allocGranularity == 0);
-
- int prot = PROT_READ | PROT_WRITE;
- int flags = MAP_PRIVATE | MAP_ANON | MAP_ALIGN | MAP_NOSYNC;
-
- void* p = mmap((caddr_t)alignment, size, prot, flags, -1, 0);
- if (p == MAP_FAILED)
- return nullptr;
- return p;
-}
-
-static void*
-MapAlignedPagesLastDitch(size_t size, size_t alignment)
-{
- return nullptr;
-}
-
-void
-UnmapPages(void* p, size_t size)
-{
- MOZ_ALWAYS_TRUE(0 == munmap((caddr_t)p, size));
-}
-
-bool
-MarkPagesUnused(void* p, size_t size)
-{
- MOZ_ASSERT(OffsetFromAligned(p, pageSize) == 0);
- return true;
-}
-
-bool
-MarkPagesInUse(void* p, size_t size)
-{
- if (!DecommitEnabled())
- return;
-
- MOZ_ASSERT(OffsetFromAligned(p, pageSize) == 0);
-}
-
-size_t
-GetPageFaultCount()
-{
- return 0;
-}
-
-void*
-AllocateMappedContent(int fd, size_t offset, size_t length, size_t alignment)
-{
- // Not implemented.
- return nullptr;
-}
-
-// Deallocate mapped memory for object.
-void
-DeallocateMappedContent(void* p, size_t length)
-{
- // Not implemented.
-}
-
#elif defined(XP_UNIX)
void
@@ -500,8 +415,15 @@ InitMemorySubsystem()
static inline void*
MapMemoryAt(void* desired, size_t length, int prot = PROT_READ | PROT_WRITE,
int flags = MAP_PRIVATE | MAP_ANON, int fd = -1, off_t offset = 0)
+
+// Solaris manages the 64-bit address space differently from every other
+// AMD64 operating system, but fortunately the fix is the same one required
+// on 64-bit SPARC, Itanium, and ARM under any operating system. It may seem
+// that this workaround cannot be correct on AMD64, but for Solaris/illumos
+// it is.
+
{
-#if defined(__ia64__) || (defined(__sparc64__) && defined(__NetBSD__)) || defined(__aarch64__)
+#if defined(__ia64__) || (defined(__sparc64__) && defined(__NetBSD__)) || defined(__aarch64__) || (defined(__sun) && defined(__x86_64__))
MOZ_ASSERT((0xffff800000000000ULL & (uintptr_t(desired) + length - 1)) == 0);
#endif
void* region = mmap(desired, length, prot, flags, fd, offset);
@@ -551,7 +473,7 @@ MapMemory(size_t length, int prot = PROT_READ | PROT_WRITE,
return nullptr;
}
return region;
-#elif defined(__aarch64__)
+#elif defined(__aarch64__) || (defined(__sun) && defined(__x86_64__))
/*
* There might be similar virtual address issue on arm64 which depends on
* hardware and kernel configurations. But the work around is slightly
@@ -763,7 +685,11 @@ MarkPagesUnused(void* p, size_t size)
return false;
MOZ_ASSERT(OffsetFromAligned(p, pageSize) == 0);
- int result = madvise(p, size, MADV_DONTNEED);
+#ifdef XP_SOLARIS
+ int result = posix_madvise(p, size, POSIX_MADV_DONTNEED);
+#else
+ int result = madvise(p, size, MADV_DONTNEED);
+#endif
return result != -1;
}
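[Sketch of the decommit-call selection above as a standalone wrapper (hypothetical, simplified): Solaris exposes posix_madvise() rather than the BSD-style madvise() prototype. Note the differing error conventions: posix_madvise() returns 0 on success and an error number on failure, while madvise() returns -1 and sets errno.

    #include <sys/mman.h>

    static bool DecommitPages(void* p, size_t size) {
    #ifdef __sun
        // posix_madvise returns 0 on success, an error number on failure.
        return posix_madvise(p, size, POSIX_MADV_DONTNEED) == 0;
    #else
        // madvise returns -1 on failure and sets errno.
        return madvise(p, size, MADV_DONTNEED) != -1;
    #endif
    }
]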
diff --git a/js/src/gc/Nursery.cpp b/js/src/gc/Nursery.cpp
index 55ca5a059..93a0eb6a8 100644
--- a/js/src/gc/Nursery.cpp
+++ b/js/src/gc/Nursery.cpp
@@ -210,6 +210,7 @@ js::Nursery::disable()
return;
updateNumChunks(0);
currentEnd_ = 0;
+ position_ = 0;
runtime()->gc.storeBuffer.disable();
}
@@ -530,7 +531,6 @@ js::Nursery::collect(JSRuntime* rt, JS::gcreason::Reason reason)
// the nursery is full, look for object groups that are getting promoted
// excessively and try to pretenure them.
maybeStartProfile(ProfileKey::Pretenure);
- uint32_t pretenureCount = 0;
if (promotionRate > 0.8 || reason == JS::gcreason::FULL_STORE_BUFFER) {
JSContext* cx = rt->contextFromMainThread();
for (auto& entry : tenureCounts.entries) {
@@ -539,7 +539,6 @@ js::Nursery::collect(JSRuntime* rt, JS::gcreason::Reason reason)
if (group->canPreTenure()) {
AutoCompartment ac(cx, group->compartment());
group->setShouldPreTenure(cx);
- pretenureCount++;
}
}
}
@@ -556,12 +555,6 @@ js::Nursery::collect(JSRuntime* rt, JS::gcreason::Reason reason)
minorGcCount_++;
int64_t totalTime = profileTimes_[ProfileKey::Total];
- rt->addTelemetry(JS_TELEMETRY_GC_MINOR_US, totalTime);
- rt->addTelemetry(JS_TELEMETRY_GC_MINOR_REASON, reason);
- if (totalTime > 1000)
- rt->addTelemetry(JS_TELEMETRY_GC_MINOR_REASON_LONG, reason);
- rt->addTelemetry(JS_TELEMETRY_GC_NURSERY_BYTES, sizeOfHeapCommitted());
- rt->addTelemetry(JS_TELEMETRY_GC_PRETENURE_COUNT, pretenureCount);
rt->gc.stats.endNurseryCollection(reason);
TraceMinorGCEnd();
diff --git a/js/src/gc/Nursery.h b/js/src/gc/Nursery.h
index 0d215d997..a839a4979 100644
--- a/js/src/gc/Nursery.h
+++ b/js/src/gc/Nursery.h
@@ -245,6 +245,7 @@ class Nursery
// Free space remaining, not counting chunk trailers.
MOZ_ALWAYS_INLINE size_t freeSpace() const {
+ MOZ_ASSERT(isEnabled());
MOZ_ASSERT(currentEnd_ - position_ <= NurseryChunkUsableSize);
return (currentEnd_ - position_) +
(numChunks() - currentChunk_ - 1) * NurseryChunkUsableSize;
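[For reference, the freeSpace() arithmetic above worked with hypothetical numbers: the space left in the current chunk plus all whole chunks after it.

    #include <cstddef>

    std::size_t freeSpaceExample() {
        const std::size_t usable = 16 * 1024;        // stand-in for NurseryChunkUsableSize
        const std::size_t numChunks = 4, currentChunk = 1;
        const std::size_t leftInCurrent = 4 * 1024;  // currentEnd_ - position_
        // 4 KiB + (4 - 1 - 1) * 16 KiB = 36 KiB = 36864 bytes
        return leftInCurrent + (numChunks - currentChunk - 1) * usable;
    }
]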
diff --git a/js/src/gc/RootMarking.cpp b/js/src/gc/RootMarking.cpp
index 93264084b..7d665e8eb 100644
--- a/js/src/gc/RootMarking.cpp
+++ b/js/src/gc/RootMarking.cpp
@@ -14,7 +14,6 @@
#include "jsgc.h"
#include "jsprf.h"
#include "jstypes.h"
-#include "jswatchpoint.h"
#include "builtin/MapObject.h"
#include "frontend/BytecodeCompiler.h"
@@ -478,6 +477,7 @@ js::gc::GCRuntime::bufferGrayRoots()
for (GCZonesIter zone(rt); !zone.done(); zone.next())
MOZ_ASSERT(zone->gcGrayRoots.empty());
+ gcstats::AutoPhase ap(stats, gcstats::PHASE_BUFFER_GRAY_ROOTS);
BufferGrayRootsTracer grayBufferer(rt);
if (JSTraceDataOp op = grayRootTracer.op)
@@ -540,4 +540,3 @@ GCRuntime::resetBufferedGrayRoots() const
for (GCZonesIter zone(rt); !zone.done(); zone.next())
zone->gcGrayRoots.clearAndFree();
}
-
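[The gcstats::AutoPhase added in bufferGrayRoots() above is the usual RAII timing guard. A simplified sketch of the pattern (hypothetical types, not the real gcstats interface):

    #include <chrono>

    struct MiniStats {
        long long phaseMicros[64] = {};
        void record(int phase, long long us) { phaseMicros[phase] += us; }
    };

    struct AutoPhaseLike {
        MiniStats& stats;
        int phase;
        std::chrono::steady_clock::time_point start;

        AutoPhaseLike(MiniStats& s, int p)
          : stats(s), phase(p), start(std::chrono::steady_clock::now()) {}

        // Recording in the destructor attributes the whole enclosing scope
        // to the phase, including every early-return path.
        ~AutoPhaseLike() {
            auto us = std::chrono::duration_cast<std::chrono::microseconds>(
                          std::chrono::steady_clock::now() - start).count();
            stats.record(phase, us);
        }
    };
]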
diff --git a/js/src/gc/Statistics.cpp b/js/src/gc/Statistics.cpp
index 19f9986dd..8a9f4e135 100644
--- a/js/src/gc/Statistics.cpp
+++ b/js/src/gc/Statistics.cpp
@@ -34,13 +34,6 @@ using mozilla::MakeRange;
using mozilla::PodArrayZero;
using mozilla::PodZero;
-/*
- * If this fails, then you can either delete this assertion and allow all
- * larger-numbered reasons to pile up in the last telemetry bucket, or switch
- * to GC_REASON_3 and bump the max value.
- */
-JS_STATIC_ASSERT(JS::gcreason::NUM_TELEMETRY_REASONS >= JS::gcreason::NUM_REASONS);
-
const char*
js::gcstats::ExplainInvocationKind(JSGCInvocationKind gckind)
{
@@ -92,7 +85,6 @@ struct PhaseInfo
Phase index;
const char* name;
Phase parent;
- const uint8_t telemetryBucket;
};
// The zeroth entry in the timing arrays is used for phases that have a
@@ -134,78 +126,74 @@ struct DagChildEdge {
*/
static const PhaseInfo phases[] = {
- { PHASE_MUTATOR, "Mutator Running", PHASE_NO_PARENT, 0 },
- { PHASE_GC_BEGIN, "Begin Callback", PHASE_NO_PARENT, 1 },
- { PHASE_WAIT_BACKGROUND_THREAD, "Wait Background Thread", PHASE_NO_PARENT, 2 },
- { PHASE_MARK_DISCARD_CODE, "Mark Discard Code", PHASE_NO_PARENT, 3 },
- { PHASE_RELAZIFY_FUNCTIONS, "Relazify Functions", PHASE_NO_PARENT, 4 },
- { PHASE_PURGE, "Purge", PHASE_NO_PARENT, 5 },
- { PHASE_MARK, "Mark", PHASE_NO_PARENT, 6 },
- { PHASE_UNMARK, "Unmark", PHASE_MARK, 7 },
+ { PHASE_MUTATOR, "Mutator Running", PHASE_NO_PARENT },
+ { PHASE_GC_BEGIN, "Begin Callback", PHASE_NO_PARENT },
+ { PHASE_WAIT_BACKGROUND_THREAD, "Wait Background Thread", PHASE_NO_PARENT },
+ { PHASE_MARK_DISCARD_CODE, "Mark Discard Code", PHASE_NO_PARENT },
+ { PHASE_RELAZIFY_FUNCTIONS, "Relazify Functions", PHASE_NO_PARENT },
+ { PHASE_PURGE, "Purge", PHASE_NO_PARENT },
+ { PHASE_MARK, "Mark", PHASE_NO_PARENT },
+ { PHASE_UNMARK, "Unmark", PHASE_MARK },
/* PHASE_MARK_ROOTS */
- { PHASE_MARK_DELAYED, "Mark Delayed", PHASE_MARK, 8 },
- { PHASE_SWEEP, "Sweep", PHASE_NO_PARENT, 9 },
- { PHASE_SWEEP_MARK, "Mark During Sweeping", PHASE_SWEEP, 10 },
- { PHASE_SWEEP_MARK_TYPES, "Mark Types During Sweeping", PHASE_SWEEP_MARK, 11 },
- { PHASE_SWEEP_MARK_INCOMING_BLACK, "Mark Incoming Black Pointers", PHASE_SWEEP_MARK, 12 },
- { PHASE_SWEEP_MARK_WEAK, "Mark Weak", PHASE_SWEEP_MARK, 13 },
- { PHASE_SWEEP_MARK_INCOMING_GRAY, "Mark Incoming Gray Pointers", PHASE_SWEEP_MARK, 14 },
- { PHASE_SWEEP_MARK_GRAY, "Mark Gray", PHASE_SWEEP_MARK, 15 },
- { PHASE_SWEEP_MARK_GRAY_WEAK, "Mark Gray and Weak", PHASE_SWEEP_MARK, 16 },
- { PHASE_FINALIZE_START, "Finalize Start Callbacks", PHASE_SWEEP, 17 },
- { PHASE_WEAK_ZONEGROUP_CALLBACK, "Per-Slice Weak Callback", PHASE_FINALIZE_START, 57 },
- { PHASE_WEAK_COMPARTMENT_CALLBACK, "Per-Compartment Weak Callback", PHASE_FINALIZE_START, 58 },
- { PHASE_SWEEP_ATOMS, "Sweep Atoms", PHASE_SWEEP, 18 },
- { PHASE_SWEEP_SYMBOL_REGISTRY, "Sweep Symbol Registry", PHASE_SWEEP, 19 },
- { PHASE_SWEEP_COMPARTMENTS, "Sweep Compartments", PHASE_SWEEP, 20 },
- { PHASE_SWEEP_DISCARD_CODE, "Sweep Discard Code", PHASE_SWEEP_COMPARTMENTS, 21 },
- { PHASE_SWEEP_INNER_VIEWS, "Sweep Inner Views", PHASE_SWEEP_COMPARTMENTS, 22 },
- { PHASE_SWEEP_CC_WRAPPER, "Sweep Cross Compartment Wrappers", PHASE_SWEEP_COMPARTMENTS, 23 },
- { PHASE_SWEEP_BASE_SHAPE, "Sweep Base Shapes", PHASE_SWEEP_COMPARTMENTS, 24 },
- { PHASE_SWEEP_INITIAL_SHAPE, "Sweep Initial Shapes", PHASE_SWEEP_COMPARTMENTS, 25 },
- { PHASE_SWEEP_TYPE_OBJECT, "Sweep Type Objects", PHASE_SWEEP_COMPARTMENTS, 26 },
- { PHASE_SWEEP_BREAKPOINT, "Sweep Breakpoints", PHASE_SWEEP_COMPARTMENTS, 27 },
- { PHASE_SWEEP_REGEXP, "Sweep Regexps", PHASE_SWEEP_COMPARTMENTS, 28 },
- { PHASE_SWEEP_MISC, "Sweep Miscellaneous", PHASE_SWEEP_COMPARTMENTS, 29 },
- { PHASE_SWEEP_TYPES, "Sweep type information", PHASE_SWEEP_COMPARTMENTS, 30 },
- { PHASE_SWEEP_TYPES_BEGIN, "Sweep type tables and compilations", PHASE_SWEEP_TYPES, 31 },
- { PHASE_SWEEP_TYPES_END, "Free type arena", PHASE_SWEEP_TYPES, 32 },
- { PHASE_SWEEP_OBJECT, "Sweep Object", PHASE_SWEEP, 33 },
- { PHASE_SWEEP_STRING, "Sweep String", PHASE_SWEEP, 34 },
- { PHASE_SWEEP_SCRIPT, "Sweep Script", PHASE_SWEEP, 35 },
- { PHASE_SWEEP_SCOPE, "Sweep Scope", PHASE_SWEEP, 59 },
- { PHASE_SWEEP_SHAPE, "Sweep Shape", PHASE_SWEEP, 36 },
- { PHASE_SWEEP_JITCODE, "Sweep JIT code", PHASE_SWEEP, 37 },
- { PHASE_FINALIZE_END, "Finalize End Callback", PHASE_SWEEP, 38 },
- { PHASE_DESTROY, "Deallocate", PHASE_SWEEP, 39 },
- { PHASE_COMPACT, "Compact", PHASE_NO_PARENT, 40 },
- { PHASE_COMPACT_MOVE, "Compact Move", PHASE_COMPACT, 41 },
- { PHASE_COMPACT_UPDATE, "Compact Update", PHASE_COMPACT, 42 },
+ { PHASE_MARK_DELAYED, "Mark Delayed", PHASE_MARK },
+ { PHASE_SWEEP, "Sweep", PHASE_NO_PARENT },
+ { PHASE_SWEEP_MARK, "Mark During Sweeping", PHASE_SWEEP },
+ { PHASE_SWEEP_MARK_TYPES, "Mark Types During Sweeping", PHASE_SWEEP_MARK },
+ { PHASE_SWEEP_MARK_INCOMING_BLACK, "Mark Incoming Black Pointers", PHASE_SWEEP_MARK },
+ { PHASE_SWEEP_MARK_WEAK, "Mark Weak", PHASE_SWEEP_MARK },
+ { PHASE_SWEEP_MARK_INCOMING_GRAY, "Mark Incoming Gray Pointers", PHASE_SWEEP_MARK },
+ { PHASE_SWEEP_MARK_GRAY, "Mark Gray", PHASE_SWEEP_MARK },
+ { PHASE_SWEEP_MARK_GRAY_WEAK, "Mark Gray and Weak", PHASE_SWEEP_MARK },
+ { PHASE_FINALIZE_START, "Finalize Start Callbacks", PHASE_SWEEP },
+ { PHASE_WEAK_ZONEGROUP_CALLBACK, "Per-Slice Weak Callback", PHASE_FINALIZE_START },
+ { PHASE_WEAK_COMPARTMENT_CALLBACK, "Per-Compartment Weak Callback", PHASE_FINALIZE_START },
+ { PHASE_SWEEP_ATOMS, "Sweep Atoms", PHASE_SWEEP },
+ { PHASE_SWEEP_SYMBOL_REGISTRY, "Sweep Symbol Registry", PHASE_SWEEP },
+ { PHASE_SWEEP_COMPARTMENTS, "Sweep Compartments", PHASE_SWEEP },
+ { PHASE_SWEEP_DISCARD_CODE, "Sweep Discard Code", PHASE_SWEEP_COMPARTMENTS },
+ { PHASE_SWEEP_INNER_VIEWS, "Sweep Inner Views", PHASE_SWEEP_COMPARTMENTS },
+ { PHASE_SWEEP_CC_WRAPPER, "Sweep Cross Compartment Wrappers", PHASE_SWEEP_COMPARTMENTS },
+ { PHASE_SWEEP_BASE_SHAPE, "Sweep Base Shapes", PHASE_SWEEP_COMPARTMENTS },
+ { PHASE_SWEEP_INITIAL_SHAPE, "Sweep Initial Shapes", PHASE_SWEEP_COMPARTMENTS },
+ { PHASE_SWEEP_TYPE_OBJECT, "Sweep Type Objects", PHASE_SWEEP_COMPARTMENTS },
+ { PHASE_SWEEP_BREAKPOINT, "Sweep Breakpoints", PHASE_SWEEP_COMPARTMENTS },
+ { PHASE_SWEEP_REGEXP, "Sweep Regexps", PHASE_SWEEP_COMPARTMENTS },
+ { PHASE_SWEEP_MISC, "Sweep Miscellaneous", PHASE_SWEEP_COMPARTMENTS },
+ { PHASE_SWEEP_TYPES, "Sweep type information", PHASE_SWEEP_COMPARTMENTS },
+ { PHASE_SWEEP_TYPES_BEGIN, "Sweep type tables and compilations", PHASE_SWEEP_TYPES },
+ { PHASE_SWEEP_TYPES_END, "Free type arena", PHASE_SWEEP_TYPES },
+ { PHASE_SWEEP_OBJECT, "Sweep Object", PHASE_SWEEP },
+ { PHASE_SWEEP_STRING, "Sweep String", PHASE_SWEEP },
+ { PHASE_SWEEP_SCRIPT, "Sweep Script", PHASE_SWEEP },
+ { PHASE_SWEEP_SCOPE, "Sweep Scope", PHASE_SWEEP },
+ { PHASE_SWEEP_SHAPE, "Sweep Shape", PHASE_SWEEP },
+ { PHASE_SWEEP_JITCODE, "Sweep JIT code", PHASE_SWEEP },
+ { PHASE_FINALIZE_END, "Finalize End Callback", PHASE_SWEEP },
+ { PHASE_DESTROY, "Deallocate", PHASE_SWEEP },
+ { PHASE_COMPACT, "Compact", PHASE_NO_PARENT },
+ { PHASE_COMPACT_MOVE, "Compact Move", PHASE_COMPACT },
+ { PHASE_COMPACT_UPDATE, "Compact Update", PHASE_COMPACT },
/* PHASE_MARK_ROOTS */
- { PHASE_COMPACT_UPDATE_CELLS, "Compact Update Cells", PHASE_COMPACT_UPDATE, 43 },
- { PHASE_GC_END, "End Callback", PHASE_NO_PARENT, 44 },
- { PHASE_MINOR_GC, "All Minor GCs", PHASE_NO_PARENT, 45 },
+ { PHASE_COMPACT_UPDATE_CELLS, "Compact Update Cells", PHASE_COMPACT_UPDATE },
+ { PHASE_GC_END, "End Callback", PHASE_NO_PARENT },
+ { PHASE_MINOR_GC, "All Minor GCs", PHASE_NO_PARENT },
/* PHASE_MARK_ROOTS */
- { PHASE_EVICT_NURSERY, "Minor GCs to Evict Nursery", PHASE_NO_PARENT, 46 },
+ { PHASE_EVICT_NURSERY, "Minor GCs to Evict Nursery", PHASE_NO_PARENT },
/* PHASE_MARK_ROOTS */
- { PHASE_TRACE_HEAP, "Trace Heap", PHASE_NO_PARENT, 47 },
+ { PHASE_TRACE_HEAP, "Trace Heap", PHASE_NO_PARENT },
/* PHASE_MARK_ROOTS */
- { PHASE_BARRIER, "Barriers", PHASE_NO_PARENT, 55 },
- { PHASE_UNMARK_GRAY, "Unmark gray", PHASE_BARRIER, 56 },
- { PHASE_MARK_ROOTS, "Mark Roots", PHASE_MULTI_PARENTS, 48 },
- { PHASE_BUFFER_GRAY_ROOTS, "Buffer Gray Roots", PHASE_MARK_ROOTS, 49 },
- { PHASE_MARK_CCWS, "Mark Cross Compartment Wrappers", PHASE_MARK_ROOTS, 50 },
- { PHASE_MARK_STACK, "Mark C and JS stacks", PHASE_MARK_ROOTS, 51 },
- { PHASE_MARK_RUNTIME_DATA, "Mark Runtime-wide Data", PHASE_MARK_ROOTS, 52 },
- { PHASE_MARK_EMBEDDING, "Mark Embedding", PHASE_MARK_ROOTS, 53 },
- { PHASE_MARK_COMPARTMENTS, "Mark Compartments", PHASE_MARK_ROOTS, 54 },
- { PHASE_PURGE_SHAPE_TABLES, "Purge ShapeTables", PHASE_NO_PARENT, 60 },
-
- { PHASE_LIMIT, nullptr, PHASE_NO_PARENT, 60 }
-
- // Current number of telemetryBuckets is 60. If you insert new phases
- // somewhere, start at that number and count up. Do not change any existing
- // numbers.
+ { PHASE_BARRIER, "Barriers", PHASE_NO_PARENT },
+ { PHASE_UNMARK_GRAY, "Unmark gray", PHASE_BARRIER },
+ { PHASE_MARK_ROOTS, "Mark Roots", PHASE_MULTI_PARENTS },
+ { PHASE_BUFFER_GRAY_ROOTS, "Buffer Gray Roots", PHASE_MARK_ROOTS },
+ { PHASE_MARK_CCWS, "Mark Cross Compartment Wrappers", PHASE_MARK_ROOTS },
+ { PHASE_MARK_STACK, "Mark C and JS stacks", PHASE_MARK_ROOTS },
+ { PHASE_MARK_RUNTIME_DATA, "Mark Runtime-wide Data", PHASE_MARK_ROOTS },
+ { PHASE_MARK_EMBEDDING, "Mark Embedding", PHASE_MARK_ROOTS },
+ { PHASE_MARK_COMPARTMENTS, "Mark Compartments", PHASE_MARK_ROOTS },
+ { PHASE_PURGE_SHAPE_TABLES, "Purge ShapeTables", PHASE_NO_PARENT },
+
+ { PHASE_LIMIT, nullptr, PHASE_NO_PARENT }
};
static ExtraPhaseInfo phaseExtra[PHASE_LIMIT] = { { 0, 0 } };
@@ -845,12 +833,6 @@ Statistics::~Statistics()
/* static */ bool
Statistics::initialize()
{
- for (size_t i = 0; i < PHASE_LIMIT; i++) {
- MOZ_ASSERT(phases[i].index == i);
- for (size_t j = 0; j < PHASE_LIMIT; j++)
- MOZ_ASSERT_IF(i != j, phases[i].telemetryBucket != phases[j].telemetryBucket);
- }
-
// Create a static table of descendants for every phase with multiple
// children. This assumes that all descendants come linearly in the
// list, which is reasonable since full dags are not supported; any
@@ -925,32 +907,6 @@ Statistics::getMaxGCPauseSinceClear()
return maxPauseInInterval;
}
-// Sum up the time for a phase, including instances of the phase with different
-// parents.
-static int64_t
-SumPhase(Phase phase, const Statistics::PhaseTimeTable times)
-{
- int64_t sum = 0;
- for (auto i : MakeRange(Statistics::NumTimingArrays))
- sum += times[i][phase];
- return sum;
-}
-
-static Phase
-LongestPhase(const Statistics::PhaseTimeTable times)
-{
- int64_t longestTime = 0;
- Phase longestPhase = PHASE_NONE;
- for (size_t i = 0; i < PHASE_LIMIT; ++i) {
- int64_t phaseTime = SumPhase(Phase(i), times);
- if (phaseTime > longestTime) {
- longestTime = phaseTime;
- longestPhase = Phase(i);
- }
- }
- return longestPhase;
-}
-
void
Statistics::printStats()
{
@@ -985,34 +941,6 @@ Statistics::endGC()
int64_t total, longest;
gcDuration(&total, &longest);
- int64_t sccTotal, sccLongest;
- sccDurations(&sccTotal, &sccLongest);
-
- runtime->addTelemetry(JS_TELEMETRY_GC_IS_ZONE_GC, !zoneStats.isCollectingAllZones());
- runtime->addTelemetry(JS_TELEMETRY_GC_MS, t(total));
- runtime->addTelemetry(JS_TELEMETRY_GC_MAX_PAUSE_MS, t(longest));
- int64_t markTotal = SumPhase(PHASE_MARK, phaseTimes);
- int64_t markRootsTotal = SumPhase(PHASE_MARK_ROOTS, phaseTimes);
- runtime->addTelemetry(JS_TELEMETRY_GC_MARK_MS, t(markTotal));
- runtime->addTelemetry(JS_TELEMETRY_GC_SWEEP_MS, t(phaseTimes[PHASE_DAG_NONE][PHASE_SWEEP]));
- if (runtime->gc.isCompactingGc()) {
- runtime->addTelemetry(JS_TELEMETRY_GC_COMPACT_MS,
- t(phaseTimes[PHASE_DAG_NONE][PHASE_COMPACT]));
- }
- runtime->addTelemetry(JS_TELEMETRY_GC_MARK_ROOTS_MS, t(markRootsTotal));
- runtime->addTelemetry(JS_TELEMETRY_GC_MARK_GRAY_MS, t(phaseTimes[PHASE_DAG_NONE][PHASE_SWEEP_MARK_GRAY]));
- runtime->addTelemetry(JS_TELEMETRY_GC_NON_INCREMENTAL, nonincremental());
- if (nonincremental())
- runtime->addTelemetry(JS_TELEMETRY_GC_NON_INCREMENTAL_REASON, uint32_t(nonincrementalReason_));
- runtime->addTelemetry(JS_TELEMETRY_GC_INCREMENTAL_DISABLED, !runtime->gc.isIncrementalGCAllowed());
- runtime->addTelemetry(JS_TELEMETRY_GC_SCC_SWEEP_TOTAL_MS, t(sccTotal));
- runtime->addTelemetry(JS_TELEMETRY_GC_SCC_SWEEP_MAX_PAUSE_MS, t(sccLongest));
-
- if (!aborted) {
- double mmu50 = computeMMU(50 * PRMJ_USEC_PER_MSEC);
- runtime->addTelemetry(JS_TELEMETRY_GC_MMU_50, mmu50 * 100);
- }
-
if (fp)
printStats();
@@ -1061,8 +989,6 @@ Statistics::beginSlice(const ZoneGCStats& zoneStats, JSGCInvocationKind gckind,
return;
}
- runtime->addTelemetry(JS_TELEMETRY_GC_REASON, reason);
-
// Slice callbacks should only fire for the outermost level.
if (gcDepth == 1) {
bool wasFullGC = zoneStats.isCollectingAllZones();
@@ -1082,25 +1008,6 @@ Statistics::endSlice()
slices.back().endFaults = GetPageFaultCount();
slices.back().finalState = runtime->gc.state();
- int64_t sliceTime = slices.back().end - slices.back().start;
- runtime->addTelemetry(JS_TELEMETRY_GC_SLICE_MS, t(sliceTime));
- runtime->addTelemetry(JS_TELEMETRY_GC_RESET, slices.back().wasReset());
- if (slices.back().wasReset())
- runtime->addTelemetry(JS_TELEMETRY_GC_RESET_REASON, uint32_t(slices.back().resetReason));
-
- if (slices.back().budget.isTimeBudget()) {
- int64_t budget_ms = slices.back().budget.timeBudget.budget;
- runtime->addTelemetry(JS_TELEMETRY_GC_BUDGET_MS, budget_ms);
- if (budget_ms == runtime->gc.defaultSliceBudget())
- runtime->addTelemetry(JS_TELEMETRY_GC_ANIMATION_MS, t(sliceTime));
-
- // Record any phase that goes more than 2x over its budget.
- if (sliceTime > 2 * budget_ms * 1000) {
- Phase longest = LongestPhase(slices.back().phaseTimes);
- runtime->addTelemetry(JS_TELEMETRY_GC_SLOW_PHASE, phases[longest].telemetryBucket);
- }
- }
-
sliceCount_++;
}
diff --git a/js/src/gc/Statistics.h b/js/src/gc/Statistics.h
index ca1969b2c..08a2810cf 100644
--- a/js/src/gc/Statistics.h
+++ b/js/src/gc/Statistics.h
@@ -10,7 +10,6 @@
#include "mozilla/EnumeratedArray.h"
#include "mozilla/IntegerRange.h"
#include "mozilla/Maybe.h"
-#include "mozilla/PodOperations.h"
#include "jsalloc.h"
#include "jsgc.h"
@@ -112,29 +111,26 @@ enum Stat {
struct ZoneGCStats
{
/* Number of zones collected in this GC. */
- int collectedZoneCount;
+ int collectedZoneCount = 0;
/* Total number of zones in the Runtime at the start of this GC. */
- int zoneCount;
+ int zoneCount = 0;
/* Number of zones swept in this GC. */
- int sweptZoneCount;
+ int sweptZoneCount = 0;
/* Total number of compartments in all zones collected. */
- int collectedCompartmentCount;
+ int collectedCompartmentCount = 0;
/* Total number of compartments in the Runtime at the start of this GC. */
- int compartmentCount;
+ int compartmentCount = 0;
/* Total number of compartments swept by this GC. */
- int sweptCompartmentCount;
+ int sweptCompartmentCount = 0;
bool isCollectingAllZones() const { return collectedZoneCount == zoneCount; }
- ZoneGCStats()
- : collectedZoneCount(0), zoneCount(0), sweptZoneCount(0),
- collectedCompartmentCount(0), compartmentCount(0), sweptCompartmentCount(0)
- {}
+ ZoneGCStats() = default;
};
#define FOR_EACH_GC_PROFILE_TIME(_) \
diff --git a/js/src/gc/Tracer.cpp b/js/src/gc/Tracer.cpp
index 63cd9b08a..3416464dd 100644
--- a/js/src/gc/Tracer.cpp
+++ b/js/src/gc/Tracer.cpp
@@ -201,80 +201,15 @@ gc::TraceCycleCollectorChildren(JS::CallbackTracer* trc, Shape* shape)
} while (shape);
}
-// Object groups can point to other object groups via an UnboxedLayout or the
-// the original unboxed group link. There can potentially be deep or cyclic
-// chains of such groups to trace through without going through a thing that
-// participates in cycle collection. These need to be handled iteratively to
-// avoid blowing the stack when running the cycle collector's callback tracer.
-struct ObjectGroupCycleCollectorTracer : public JS::CallbackTracer
-{
- explicit ObjectGroupCycleCollectorTracer(JS::CallbackTracer* innerTracer)
- : JS::CallbackTracer(innerTracer->runtime(), DoNotTraceWeakMaps),
- innerTracer(innerTracer)
- {}
-
- void onChild(const JS::GCCellPtr& thing) override;
-
- JS::CallbackTracer* innerTracer;
- Vector<ObjectGroup*, 4, SystemAllocPolicy> seen, worklist;
-};
-
-void
-ObjectGroupCycleCollectorTracer::onChild(const JS::GCCellPtr& thing)
-{
- if (thing.is<BaseShape>()) {
- // The CC does not care about BaseShapes, and no additional GC things
- // will be reached by following this edge.
- return;
- }
-
- if (thing.is<JSObject>() || thing.is<JSScript>()) {
- // Invoke the inner cycle collector callback on this child. It will not
- // recurse back into TraceChildren.
- innerTracer->onChild(thing);
- return;
- }
-
- if (thing.is<ObjectGroup>()) {
- // If this group is required to be in an ObjectGroup chain, trace it
- // via the provided worklist rather than continuing to recurse.
- ObjectGroup& group = thing.as<ObjectGroup>();
- if (group.maybeUnboxedLayout()) {
- for (size_t i = 0; i < seen.length(); i++) {
- if (seen[i] == &group)
- return;
- }
- if (seen.append(&group) && worklist.append(&group)) {
- return;
- } else {
- // If append fails, keep tracing normally. The worst that will
- // happen is we end up overrecursing.
- }
- }
- }
-
- TraceChildren(this, thing.asCell(), thing.kind());
-}
-
void
gc::TraceCycleCollectorChildren(JS::CallbackTracer* trc, ObjectGroup* group)
{
MOZ_ASSERT(trc->isCallbackTracer());
- // Early return if this group is not required to be in an ObjectGroup chain.
- if (!group->maybeUnboxedLayout())
- return group->traceChildren(trc);
-
- ObjectGroupCycleCollectorTracer groupTracer(trc->asCallbackTracer());
- group->traceChildren(&groupTracer);
-
- while (!groupTracer.worklist.empty()) {
- ObjectGroup* innerGroup = groupTracer.worklist.popCopy();
- innerGroup->traceChildren(&groupTracer);
- }
+ group->traceChildren(trc);
}
-
+
/*** Traced Edge Printer *************************************************************************/
static size_t
diff --git a/js/src/gc/Zone.cpp b/js/src/gc/Zone.cpp
index ed099341c..f0cdde012 100644
--- a/js/src/gc/Zone.cpp
+++ b/js/src/gc/Zone.cpp
@@ -370,6 +370,21 @@ Zone::fixupAfterMovingGC()
fixupInitialShapeTable();
}
+bool
+Zone::addTypeDescrObject(JSContext* cx, HandleObject obj)
+{
+ // Type descriptor objects are always tenured so we don't need post barriers
+ // on the set.
+ MOZ_ASSERT(!IsInsideNursery(obj));
+
+ if (!typeDescrObjects.put(obj)) {
+ ReportOutOfMemory(cx);
+ return false;
+ }
+
+ return true;
+}
+
ZoneList::ZoneList()
: head(nullptr), tail(nullptr)
{}
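[A sketch of the intended call pattern for the new helper (hypothetical caller, not tree code): centralizing the put-and-report means every call site can simply propagate failure on OOM.

    static bool registerTypeDescr(JSContext* cx, JS::HandleObject descr) {
        // addTypeDescrObject reports the OOM itself, so failure just
        // propagates to the caller.
        return cx->zone()->addTypeDescrObject(cx, descr);
    }
]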
diff --git a/js/src/gc/Zone.h b/js/src/gc/Zone.h
index 50d06319d..c8520ed55 100644
--- a/js/src/gc/Zone.h
+++ b/js/src/gc/Zone.h
@@ -349,10 +349,17 @@ struct Zone : public JS::shadow::Zone,
// Keep track of all TypeDescr and related objects in this compartment.
// This is used by the GC to trace them all first when compacting, since the
// TypedObject trace hook may access these objects.
- using TypeDescrObjectSet = js::GCHashSet<js::HeapPtr<JSObject*>,
- js::MovableCellHasher<js::HeapPtr<JSObject*>>,
+
+ //
+ // There are no barriers here - the set contains only tenured objects so no
+ // post-barrier is required, and these are weak references so no pre-barrier
+ // is required.
+ using TypeDescrObjectSet = js::GCHashSet<JSObject*,
+ js::MovableCellHasher<JSObject*>,
js::SystemAllocPolicy>;
JS::WeakCache<TypeDescrObjectSet> typeDescrObjects;
+
+ bool addTypeDescrObject(JSContext* cx, HandleObject obj);
// Malloc counter to measure memory pressure for GC scheduling. It runs from