Diffstat (limited to 'js/src/gc')
-rw-r--r--   js/src/gc/GCRuntime.h |  2
-rw-r--r--   js/src/gc/Marking.cpp | 48
-rw-r--r--   js/src/gc/Memory.cpp  |  6
-rw-r--r--   js/src/gc/Nursery.cpp |  3
-rw-r--r--   js/src/gc/Tracer.cpp  | 69
5 files changed, 122 insertions, 6 deletions
diff --git a/js/src/gc/GCRuntime.h b/js/src/gc/GCRuntime.h
index f102e9ef0..f43dcd351 100644
--- a/js/src/gc/GCRuntime.h
+++ b/js/src/gc/GCRuntime.h
@@ -741,7 +741,9 @@ class GCRuntime
     void removeBlackRootsTracer(JSTraceDataOp traceOp, void* data);
 
     void setMaxMallocBytes(size_t value);
+#ifdef MOZ_DEVTOOLS_SERVER
     int32_t getMallocBytes() const { return mallocBytesUntilGC; }
+#endif
     void resetMallocBytes();
     bool isTooMuchMalloc() const { return mallocBytesUntilGC <= 0; }
     void updateMallocCounter(JS::Zone* zone, size_t nbytes);
diff --git a/js/src/gc/Marking.cpp b/js/src/gc/Marking.cpp
index 262fc8cbc..43e325394 100644
--- a/js/src/gc/Marking.cpp
+++ b/js/src/gc/Marking.cpp
@@ -18,7 +18,6 @@
 #include "builtin/ModuleObject.h"
 #include "gc/GCInternals.h"
 #include "gc/Policy.h"
-#include "gc/StoreBuffer-inl.h"
 #include "jit/IonCode.h"
 #include "js/SliceBudget.h"
 #include "vm/ArgumentsObject.h"
@@ -29,6 +28,7 @@
 #include "vm/Shape.h"
 #include "vm/Symbol.h"
 #include "vm/TypedArrayObject.h"
+#include "vm/UnboxedObject.h"
 #include "wasm/WasmJS.h"
 
 #include "jscompartmentinlines.h"
@@ -37,6 +37,7 @@
 #include "gc/Nursery-inl.h"
 
 #include "vm/String-inl.h"
+#include "vm/UnboxedObject-inl.h"
 
 using namespace js;
 using namespace js::gc;
@@ -1394,6 +1395,14 @@ js::ObjectGroup::traceChildren(JSTracer* trc)
     if (maybePreliminaryObjects())
         maybePreliminaryObjects()->trace(trc);
 
+    if (maybeUnboxedLayout())
+        unboxedLayout().trace(trc);
+
+    if (ObjectGroup* unboxedGroup = maybeOriginalUnboxedGroup()) {
+        TraceManuallyBarrieredEdge(trc, &unboxedGroup, "group_original_unboxed_group");
+        setOriginalUnboxedGroup(unboxedGroup);
+    }
+
     if (JSObject* descr = maybeTypeDescr()) {
         TraceManuallyBarrieredEdge(trc, &descr, "group_type_descr");
         setTypeDescr(&descr->as<TypeDescr>());
@@ -1427,6 +1436,12 @@ js::GCMarker::lazilyMarkChildren(ObjectGroup* group)
     if (group->maybePreliminaryObjects())
         group->maybePreliminaryObjects()->trace(this);
 
+    if (group->maybeUnboxedLayout())
+        group->unboxedLayout().trace(this);
+
+    if (ObjectGroup* unboxedGroup = group->maybeOriginalUnboxedGroup())
+        traverseEdge(group, unboxedGroup);
+
     if (TypeDescr* descr = group->maybeTypeDescr())
         traverseEdge(group, static_cast<JSObject*>(descr));
 
@@ -1469,6 +1484,23 @@ CallTraceHook(Functor f, JSTracer* trc, JSObject* obj, CheckGeneration check, Ar
         return nullptr;
     }
 
+    if (clasp == &UnboxedPlainObject::class_) {
+        JSObject** pexpando = obj->as<UnboxedPlainObject>().addressOfExpando();
+        if (*pexpando)
+            f(pexpando, mozilla::Forward<Args>(args)...);
+
+        UnboxedPlainObject& unboxed = obj->as<UnboxedPlainObject>();
+        const UnboxedLayout& layout = check == CheckGeneration::DoChecks
+                                      ? unboxed.layout()
+                                      : unboxed.layoutDontCheckGeneration();
+        if (layout.traceList()) {
+            VisitTraceList(f, layout.traceList(), unboxed.data(),
+                           mozilla::Forward<Args>(args)...);
+        }
+
+        return nullptr;
+    }
+
     clasp->doTrace(trc, obj);
 
     if (!clasp->isNative())
@@ -2261,6 +2293,18 @@ static inline void
 TraceWholeCell(TenuringTracer& mover, JSObject* object)
 {
     mover.traceObject(object);
+
+    // Additionally trace the expando object attached to any unboxed plain
+    // objects. Baseline and Ion can write properties to the expando while
+    // only adding a post barrier to the owning unboxed object. Note that
+    // it isn't possible for a nursery unboxed object to have a tenured
+    // expando, so that adding a post barrier on the original object will
+    // capture any tenured->nursery edges in the expando as well.
+
+    if (object->is<UnboxedPlainObject>()) {
+        if (UnboxedExpandoObject* expando = object->as<UnboxedPlainObject>().maybeExpando())
+            expando->traceChildren(&mover);
+    }
 }
 
 static inline void
@@ -2504,6 +2548,8 @@ js::TenuringTracer::moveObjectToTenured(JSObject* dst, JSObject* src, AllocKind
         InlineTypedObject::objectMovedDuringMinorGC(this, dst, src);
     } else if (src->is<TypedArrayObject>()) {
         tenuredSize += TypedArrayObject::objectMovedDuringMinorGC(this, dst, src, dstKind);
+    } else if (src->is<UnboxedArrayObject>()) {
+        tenuredSize += UnboxedArrayObject::objectMovedDuringMinorGC(this, dst, src, dstKind);
     } else if (src->is<ArgumentsObject>()) {
         tenuredSize += ArgumentsObject::objectMovedDuringMinorGC(this, dst, src);
     } else if (src->is<ProxyObject>()) {
diff --git a/js/src/gc/Memory.cpp b/js/src/gc/Memory.cpp
index 418984057..17c964da0 100644
--- a/js/src/gc/Memory.cpp
+++ b/js/src/gc/Memory.cpp
@@ -423,7 +423,7 @@ MapMemoryAt(void* desired, size_t length, int prot = PROT_READ | PROT_WRITE,
 // possibly be correct on AMD64, but for Solaris/illumos it is.
 {
 
-#if defined(__ia64__) || (defined(__sparc64__) && defined(__NetBSD__)) || defined(__aarch64__) || (defined(__sun) && defined(__x86_64__))
+#if defined(__ia64__) || defined(__aarch64__) || (defined(__sun) && defined(__x86_64__)) || (defined(__sparc__) && defined(__arch64__) && (defined(__NetBSD__) || defined(__linux__)))
     MOZ_ASSERT((0xffff800000000000ULL & (uintptr_t(desired) + length - 1)) == 0);
 #endif
     void* region = mmap(desired, length, prot, flags, fd, offset);
@@ -446,7 +446,7 @@ static inline void*
 MapMemory(size_t length, int prot = PROT_READ | PROT_WRITE,
           int flags = MAP_PRIVATE | MAP_ANON, int fd = -1, off_t offset = 0)
 {
-#if defined(__ia64__) || (defined(__sparc64__) && defined(__NetBSD__))
+#if defined(__ia64__) || (defined(__sparc__) && defined(__arch64__) && defined(__NetBSD__))
     /*
      * The JS engine assumes that all allocated pointers have their high 17 bits clear,
      * which ia64's mmap doesn't support directly. However, we can emulate it by passing
@@ -473,7 +473,7 @@ MapMemory(size_t length, int prot = PROT_READ | PROT_WRITE,
         return nullptr;
     }
     return region;
-#elif defined(__aarch64__) || (defined(__sun) && defined(__x86_64__))
+#elif defined(__aarch64__) || (defined(__sun) && defined(__x86_64__)) || (defined(__sparc__) && defined(__arch64__) && defined(__linux__))
     /*
      * There might be similar virtual address issue on arm64 which depends on
      * hardware and kernel configurations. But the work around is slightly
diff --git a/js/src/gc/Nursery.cpp b/js/src/gc/Nursery.cpp
index 93a0eb6a8..737d68bd0 100644
--- a/js/src/gc/Nursery.cpp
+++ b/js/src/gc/Nursery.cpp
@@ -505,7 +505,10 @@ js::Nursery::collect(JSRuntime* rt, JS::gcreason::Reason reason)
     if (!isEnabled())
         return;
 
+#ifdef MOZ_DEVTOOLS_SERVER
+    // No need to obsessively track this without devtools
     rt->gc.incMinorGcNumber();
+#endif
 
     rt->gc.stats.beginNurseryCollection(reason);
     TraceMinorGCStart();
diff --git a/js/src/gc/Tracer.cpp b/js/src/gc/Tracer.cpp
index 3416464dd..63cd9b08a 100644
--- a/js/src/gc/Tracer.cpp
+++ b/js/src/gc/Tracer.cpp
@@ -201,15 +201,80 @@ gc::TraceCycleCollectorChildren(JS::CallbackTracer* trc, Shape* shape)
     } while (shape);
 }
 
+// Object groups can point to other object groups via an UnboxedLayout or the
+// the original unboxed group link. There can potentially be deep or cyclic
+// chains of such groups to trace through without going through a thing that
+// participates in cycle collection. These need to be handled iteratively to
+// avoid blowing the stack when running the cycle collector's callback tracer.
+struct ObjectGroupCycleCollectorTracer : public JS::CallbackTracer
+{
+    explicit ObjectGroupCycleCollectorTracer(JS::CallbackTracer* innerTracer)
+      : JS::CallbackTracer(innerTracer->runtime(), DoNotTraceWeakMaps),
+        innerTracer(innerTracer)
+    {}
+
+    void onChild(const JS::GCCellPtr& thing) override;
+
+    JS::CallbackTracer* innerTracer;
+    Vector<ObjectGroup*, 4, SystemAllocPolicy> seen, worklist;
+};
+
+void
+ObjectGroupCycleCollectorTracer::onChild(const JS::GCCellPtr& thing)
+{
+    if (thing.is<BaseShape>()) {
+        // The CC does not care about BaseShapes, and no additional GC things
+        // will be reached by following this edge.
+        return;
+    }
+
+    if (thing.is<JSObject>() || thing.is<JSScript>()) {
+        // Invoke the inner cycle collector callback on this child. It will not
+        // recurse back into TraceChildren.
+        innerTracer->onChild(thing);
+        return;
+    }
+
+    if (thing.is<ObjectGroup>()) {
+        // If this group is required to be in an ObjectGroup chain, trace it
+        // via the provided worklist rather than continuing to recurse.
+        ObjectGroup& group = thing.as<ObjectGroup>();
+        if (group.maybeUnboxedLayout()) {
+            for (size_t i = 0; i < seen.length(); i++) {
+                if (seen[i] == &group)
+                    return;
+            }
+            if (seen.append(&group) && worklist.append(&group)) {
+                return;
+            } else {
+                // If append fails, keep tracing normally. The worst that will
+                // happen is we end up overrecursing.
+            }
+        }
+    }
+
+    TraceChildren(this, thing.asCell(), thing.kind());
+}
+
 void
 gc::TraceCycleCollectorChildren(JS::CallbackTracer* trc, ObjectGroup* group)
 {
     MOZ_ASSERT(trc->isCallbackTracer());
 
-    group->traceChildren(trc);
-}
+    // Early return if this group is not required to be in an ObjectGroup chain.
+    if (!group->maybeUnboxedLayout())
+        return group->traceChildren(trc);
+
+    ObjectGroupCycleCollectorTracer groupTracer(trc->asCallbackTracer());
+    group->traceChildren(&groupTracer);
+
+    while (!groupTracer.worklist.empty()) {
+        ObjectGroup* innerGroup = groupTracer.worklist.popCopy();
+        innerGroup->traceChildren(&groupTracer);
+    }
+}
+
 /*** Traced Edge Printer *************************************************************************/
 
 static size_t
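
The Tracer.cpp hunk above replaces recursive tracing of unboxed group chains with a seen set plus a worklist, so that deep or cyclic ObjectGroup chains cannot overflow the stack. Below is a minimal, self-contained sketch of that pattern in plain C++; Node, children, visit, and traceIteratively are hypothetical stand-ins rather than SpiderMonkey APIs (the real tracer uses js::Vector, scans the seen list linearly, and falls back to ordinary recursion if an append fails).

#include <unordered_set>
#include <vector>

// Hypothetical node type standing in for ObjectGroup; "children" stands in
// for the edges reached through traceChildren().
struct Node {
    std::vector<Node*> children;
};

// Per-node work, e.g. forwarding the edge to an inner callback.
void visit(Node* node) { (void)node; }

// Walk everything reachable from root without recursion: the seen set keeps
// cycles from being retraced, and the worklist is drained until empty.
void traceIteratively(Node* root) {
    std::unordered_set<Node*> seen;
    std::vector<Node*> worklist;

    seen.insert(root);
    worklist.push_back(root);

    while (!worklist.empty()) {
        Node* current = worklist.back();
        worklist.pop_back();
        visit(current);

        for (Node* child : current->children) {
            // Queue each child exactly once; a node already in "seen" is
            // never pushed again, so cyclic chains terminate.
            if (seen.insert(child).second)
                worklist.push_back(child);
        }
    }
}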