Diffstat (limited to 'js/src/gc')
-rw-r--r--  js/src/gc/GCRuntime.h      |  4
-rw-r--r--  js/src/gc/Marking.cpp      | 46
-rw-r--r--  js/src/gc/Memory.cpp       | 85
-rw-r--r--  js/src/gc/RootMarking.cpp  |  2
-rw-r--r--  js/src/gc/Tracer.cpp       | 69
-rw-r--r--  js/src/gc/Zone.cpp         | 15
-rw-r--r--  js/src/gc/Zone.h           | 11
7 files changed, 31 insertions, 201 deletions
diff --git a/js/src/gc/GCRuntime.h b/js/src/gc/GCRuntime.h
index 5c2576efd..f102e9ef0 100644
--- a/js/src/gc/GCRuntime.h
+++ b/js/src/gc/GCRuntime.h
@@ -900,7 +900,8 @@ class GCRuntime
void requestMajorGC(JS::gcreason::Reason reason);
SliceBudget defaultBudget(JS::gcreason::Reason reason, int64_t millis);
- void budgetIncrementalGC(SliceBudget& budget, AutoLockForExclusiveAccess& lock);
+ void budgetIncrementalGC(JS::gcreason::Reason reason, SliceBudget& budget,
+ AutoLockForExclusiveAccess& lock);
void resetIncrementalGC(AbortReason reason, AutoLockForExclusiveAccess& lock);
// Assert if the system state is such that we should never
@@ -915,6 +916,7 @@ class GCRuntime
void collect(bool nonincrementalByAPI, SliceBudget budget, JS::gcreason::Reason reason) JS_HAZ_GC_CALL;
MOZ_MUST_USE bool gcCycle(bool nonincrementalByAPI, SliceBudget& budget,
JS::gcreason::Reason reason);
+ bool shouldRepeatForDeadZone(JS::gcreason::Reason reason);
void incrementalCollectSlice(SliceBudget& budget, JS::gcreason::Reason reason,
AutoLockForExclusiveAccess& lock);
diff --git a/js/src/gc/Marking.cpp b/js/src/gc/Marking.cpp
index 3ea4c9d29..47d2314c1 100644
--- a/js/src/gc/Marking.cpp
+++ b/js/src/gc/Marking.cpp
@@ -18,6 +18,7 @@
#include "builtin/ModuleObject.h"
#include "gc/GCInternals.h"
#include "gc/Policy.h"
+#include "gc/StoreBuffer-inl.h"
#include "jit/IonCode.h"
#include "js/SliceBudget.h"
#include "vm/ArgumentsObject.h"
@@ -28,7 +29,6 @@
#include "vm/Shape.h"
#include "vm/Symbol.h"
#include "vm/TypedArrayObject.h"
-#include "vm/UnboxedObject.h"
#include "wasm/WasmJS.h"
#include "jscompartmentinlines.h"
@@ -37,7 +37,6 @@
#include "gc/Nursery-inl.h"
#include "vm/String-inl.h"
-#include "vm/UnboxedObject-inl.h"
using namespace js;
using namespace js::gc;
@@ -1395,14 +1394,6 @@ js::ObjectGroup::traceChildren(JSTracer* trc)
if (maybePreliminaryObjects())
maybePreliminaryObjects()->trace(trc);
- if (maybeUnboxedLayout())
- unboxedLayout().trace(trc);
-
- if (ObjectGroup* unboxedGroup = maybeOriginalUnboxedGroup()) {
- TraceManuallyBarrieredEdge(trc, &unboxedGroup, "group_original_unboxed_group");
- setOriginalUnboxedGroup(unboxedGroup);
- }
-
if (JSObject* descr = maybeTypeDescr()) {
TraceManuallyBarrieredEdge(trc, &descr, "group_type_descr");
setTypeDescr(&descr->as<TypeDescr>());
@@ -1436,12 +1427,6 @@ js::GCMarker::lazilyMarkChildren(ObjectGroup* group)
if (group->maybePreliminaryObjects())
group->maybePreliminaryObjects()->trace(this);
- if (group->maybeUnboxedLayout())
- group->unboxedLayout().trace(this);
-
- if (ObjectGroup* unboxedGroup = group->maybeOriginalUnboxedGroup())
- traverseEdge(group, unboxedGroup);
-
if (TypeDescr* descr = group->maybeTypeDescr())
traverseEdge(group, static_cast<JSObject*>(descr));
@@ -1484,23 +1469,6 @@ CallTraceHook(Functor f, JSTracer* trc, JSObject* obj, CheckGeneration check, Ar
return nullptr;
}
- if (clasp == &UnboxedPlainObject::class_) {
- JSObject** pexpando = obj->as<UnboxedPlainObject>().addressOfExpando();
- if (*pexpando)
- f(pexpando, mozilla::Forward<Args>(args)...);
-
- UnboxedPlainObject& unboxed = obj->as<UnboxedPlainObject>();
- const UnboxedLayout& layout = check == CheckGeneration::DoChecks
- ? unboxed.layout()
- : unboxed.layoutDontCheckGeneration();
- if (layout.traceList()) {
- VisitTraceList(f, layout.traceList(), unboxed.data(),
- mozilla::Forward<Args>(args)...);
- }
-
- return nullptr;
- }
-
clasp->doTrace(trc, obj);
if (!clasp->isNative())
@@ -2293,18 +2261,6 @@ static inline void
TraceWholeCell(TenuringTracer& mover, JSObject* object)
{
mover.traceObject(object);
-
- // Additionally trace the expando object attached to any unboxed plain
- // objects. Baseline and Ion can write properties to the expando while
- // only adding a post barrier to the owning unboxed object. Note that
- // it isn't possible for a nursery unboxed object to have a tenured
- // expando, so that adding a post barrier on the original object will
- // capture any tenured->nursery edges in the expando as well.
-
- if (object->is<UnboxedPlainObject>()) {
- if (UnboxedExpandoObject* expando = object->as<UnboxedPlainObject>().maybeExpando())
- expando->traceChildren(&mover);
- }
}
static inline void
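The hunk above drops the unboxed-expando special case from TraceWholeCell. The comment it removes described the whole-cell post-barrier pattern this function services: when a tenured cell may have acquired nursery edges without per-field post barriers, the cell itself is buffered and re-traced at the next minor GC. A minimal sketch of that idea, using hypothetical Cell/Tracer types rather than the real StoreBuffer API:

#include <unordered_set>

struct Tracer {};

struct Cell {
    virtual ~Cell() = default;
    virtual void traceChildren(Tracer* trc) = 0;
};

struct WholeCellBuffer {
    std::unordered_set<Cell*> cells;       // deduplicated tenured cells

    // Post barrier: remember the whole cell instead of individual fields.
    void putWholeCell(Cell* cell) { cells.insert(cell); }

    // At the start of a minor GC, re-trace every buffered cell so any
    // tenured->nursery edges it picked up are discovered.
    void traceAll(Tracer* trc) {
        for (Cell* cell : cells)
            cell->traceChildren(trc);
        cells.clear();
    }
};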
diff --git a/js/src/gc/Memory.cpp b/js/src/gc/Memory.cpp
index 26da75469..268e1e489 100644
--- a/js/src/gc/Memory.cpp
+++ b/js/src/gc/Memory.cpp
@@ -17,11 +17,6 @@
#include "jswin.h"
#include <psapi.h>
-#elif defined(SOLARIS)
-
-#include <sys/mman.h>
-#include <unistd.h>
-
#elif defined(XP_UNIX)
#include <algorithm>
@@ -408,86 +403,6 @@ DeallocateMappedContent(void* p, size_t length)
# endif
-#elif defined(SOLARIS)
-
-#ifndef MAP_NOSYNC
-# define MAP_NOSYNC 0
-#endif
-
-void
-InitMemorySubsystem()
-{
- if (pageSize == 0)
- pageSize = allocGranularity = size_t(sysconf(_SC_PAGESIZE));
-}
-
-void*
-MapAlignedPages(size_t size, size_t alignment)
-{
- MOZ_ASSERT(size >= alignment);
- MOZ_ASSERT(size >= allocGranularity);
- MOZ_ASSERT(size % alignment == 0);
- MOZ_ASSERT(size % pageSize == 0);
- MOZ_ASSERT_IF(alignment < allocGranularity, allocGranularity % alignment == 0);
- MOZ_ASSERT_IF(alignment > allocGranularity, alignment % allocGranularity == 0);
-
- int prot = PROT_READ | PROT_WRITE;
- int flags = MAP_PRIVATE | MAP_ANON | MAP_ALIGN | MAP_NOSYNC;
-
- void* p = mmap((caddr_t)alignment, size, prot, flags, -1, 0);
- if (p == MAP_FAILED)
- return nullptr;
- return p;
-}
-
-static void*
-MapAlignedPagesLastDitch(size_t size, size_t alignment)
-{
- return nullptr;
-}
-
-void
-UnmapPages(void* p, size_t size)
-{
- MOZ_ALWAYS_TRUE(0 == munmap((caddr_t)p, size));
-}
-
-bool
-MarkPagesUnused(void* p, size_t size)
-{
- MOZ_ASSERT(OffsetFromAligned(p, pageSize) == 0);
- return true;
-}
-
-void
-MarkPagesInUse(void* p, size_t size)
-{
- if (!DecommitEnabled())
- return;
-
- MOZ_ASSERT(OffsetFromAligned(p, pageSize) == 0);
-}
-
-size_t
-GetPageFaultCount()
-{
- return 0;
-}
-
-void*
-AllocateMappedContent(int fd, size_t offset, size_t length, size_t alignment)
-{
- // Not implemented.
- return nullptr;
-}
-
-// Deallocate mapped memory for object.
-void
-DeallocateMappedContent(void* p, size_t length)
-{
- // Not implemented.
-}
-
#elif defined(XP_UNIX)
void
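The deleted Solaris block could request aligned memory directly from mmap via its MAP_ALIGN flag; the generic XP_UNIX path that now covers such systems has no equivalent flag. For context, the usual POSIX fallback is to over-allocate and trim, roughly as sketched below (illustrative only, assuming standard mmap/munmap behaviour and a power-of-two alignment; this is not the actual XP_UNIX code in Memory.cpp):

#include <sys/mman.h>
#include <cassert>
#include <cstddef>
#include <cstdint>

static void*
MapAlignedPosix(size_t size, size_t alignment)
{
    assert((alignment & (alignment - 1)) == 0);   // power of two

    // Reserve enough that an aligned region of |size| bytes must fit.
    size_t reserve = size + alignment;
    void* raw = mmap(nullptr, reserve, PROT_READ | PROT_WRITE,
                     MAP_PRIVATE | MAP_ANON, -1, 0);
    if (raw == MAP_FAILED)
        return nullptr;

    uintptr_t base = reinterpret_cast<uintptr_t>(raw);
    uintptr_t aligned = (base + alignment - 1) & ~(uintptr_t(alignment) - 1);

    // Unmap the misaligned head and the unused tail, keeping the middle.
    if (size_t head = aligned - base)
        munmap(raw, head);
    if (size_t tail = (base + reserve) - (aligned + size))
        munmap(reinterpret_cast<void*>(aligned + size), tail);

    return reinterpret_cast<void*>(aligned);
}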
diff --git a/js/src/gc/RootMarking.cpp b/js/src/gc/RootMarking.cpp
index 93264084b..f5969bc1f 100644
--- a/js/src/gc/RootMarking.cpp
+++ b/js/src/gc/RootMarking.cpp
@@ -478,6 +478,7 @@ js::gc::GCRuntime::bufferGrayRoots()
for (GCZonesIter zone(rt); !zone.done(); zone.next())
MOZ_ASSERT(zone->gcGrayRoots.empty());
+ gcstats::AutoPhase ap(stats, gcstats::PHASE_BUFFER_GRAY_ROOTS);
BufferGrayRootsTracer grayBufferer(rt);
if (JSTraceDataOp op = grayRootTracer.op)
@@ -540,4 +541,3 @@ GCRuntime::resetBufferedGrayRoots() const
for (GCZonesIter zone(rt); !zone.done(); zone.next())
zone->gcGrayRoots.clearAndFree();
}
-
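The added line wraps gray-root buffering in gcstats::AutoPhase, the RAII helper that charges the time spent in the enclosing scope to a named GC phase (here PHASE_BUFFER_GRAY_ROOTS). The RAII accounting idea, reduced to a standalone sketch with hypothetical types (the real gcstats::Statistics interface is not part of this diff):

#include <chrono>
#include <cstdio>

enum class Phase { BufferGrayRoots };

class AutoPhaseSketch {
    Phase phase_;
    std::chrono::steady_clock::time_point start_;

  public:
    explicit AutoPhaseSketch(Phase phase)
      : phase_(phase), start_(std::chrono::steady_clock::now()) {}

    ~AutoPhaseSketch() {
        // On scope exit, attribute the elapsed time to the phase.
        auto us = std::chrono::duration_cast<std::chrono::microseconds>(
                      std::chrono::steady_clock::now() - start_).count();
        std::printf("phase %d: %lld us\n", static_cast<int>(phase_),
                    static_cast<long long>(us));
    }
};

// Usage mirrors the patch: construct at the top of the scope being measured.
void BufferGrayRootsSketch() {
    AutoPhaseSketch ap(Phase::BufferGrayRoots);
    // ... buffer the roots ...
}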
diff --git a/js/src/gc/Tracer.cpp b/js/src/gc/Tracer.cpp
index 63cd9b08a..3416464dd 100644
--- a/js/src/gc/Tracer.cpp
+++ b/js/src/gc/Tracer.cpp
@@ -201,80 +201,15 @@ gc::TraceCycleCollectorChildren(JS::CallbackTracer* trc, Shape* shape)
} while (shape);
}
-// Object groups can point to other object groups via an UnboxedLayout or the
-// the original unboxed group link. There can potentially be deep or cyclic
-// chains of such groups to trace through without going through a thing that
-// participates in cycle collection. These need to be handled iteratively to
-// avoid blowing the stack when running the cycle collector's callback tracer.
-struct ObjectGroupCycleCollectorTracer : public JS::CallbackTracer
-{
- explicit ObjectGroupCycleCollectorTracer(JS::CallbackTracer* innerTracer)
- : JS::CallbackTracer(innerTracer->runtime(), DoNotTraceWeakMaps),
- innerTracer(innerTracer)
- {}
-
- void onChild(const JS::GCCellPtr& thing) override;
-
- JS::CallbackTracer* innerTracer;
- Vector<ObjectGroup*, 4, SystemAllocPolicy> seen, worklist;
-};
-
-void
-ObjectGroupCycleCollectorTracer::onChild(const JS::GCCellPtr& thing)
-{
- if (thing.is<BaseShape>()) {
- // The CC does not care about BaseShapes, and no additional GC things
- // will be reached by following this edge.
- return;
- }
-
- if (thing.is<JSObject>() || thing.is<JSScript>()) {
- // Invoke the inner cycle collector callback on this child. It will not
- // recurse back into TraceChildren.
- innerTracer->onChild(thing);
- return;
- }
-
- if (thing.is<ObjectGroup>()) {
- // If this group is required to be in an ObjectGroup chain, trace it
- // via the provided worklist rather than continuing to recurse.
- ObjectGroup& group = thing.as<ObjectGroup>();
- if (group.maybeUnboxedLayout()) {
- for (size_t i = 0; i < seen.length(); i++) {
- if (seen[i] == &group)
- return;
- }
- if (seen.append(&group) && worklist.append(&group)) {
- return;
- } else {
- // If append fails, keep tracing normally. The worst that will
- // happen is we end up overrecursing.
- }
- }
- }
-
- TraceChildren(this, thing.asCell(), thing.kind());
-}
-
void
gc::TraceCycleCollectorChildren(JS::CallbackTracer* trc, ObjectGroup* group)
{
MOZ_ASSERT(trc->isCallbackTracer());
- // Early return if this group is not required to be in an ObjectGroup chain.
- if (!group->maybeUnboxedLayout())
- return group->traceChildren(trc);
-
- ObjectGroupCycleCollectorTracer groupTracer(trc->asCallbackTracer());
- group->traceChildren(&groupTracer);
-
- while (!groupTracer.worklist.empty()) {
- ObjectGroup* innerGroup = groupTracer.worklist.popCopy();
- innerGroup->traceChildren(&groupTracer);
- }
+ group->traceChildren(trc);
}
-
+
/*** Traced Edge Printer *************************************************************************/
static size_t
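The deleted ObjectGroupCycleCollectorTracer existed only to walk deep or cyclic chains of unboxed object groups without recursing; with unboxed groups gone, TraceCycleCollectorChildren can simply call traceChildren. The general technique its comment described, recursion replaced by an explicit worklist plus a visited set so the C++ stack stays bounded, looks roughly like this (hypothetical Node type, not the SpiderMonkey tracer API):

#include <unordered_set>
#include <vector>

struct Node { std::vector<Node*> children; };

void VisitIteratively(Node* root, void (*visit)(Node*))
{
    std::vector<Node*> worklist{root};
    std::unordered_set<Node*> seen{root};

    while (!worklist.empty()) {
        Node* node = worklist.back();
        worklist.pop_back();
        visit(node);
        for (Node* child : node->children) {
            if (seen.insert(child).second)    // enqueue each node only once
                worklist.push_back(child);
        }
    }
}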
diff --git a/js/src/gc/Zone.cpp b/js/src/gc/Zone.cpp
index ed099341c..f0cdde012 100644
--- a/js/src/gc/Zone.cpp
+++ b/js/src/gc/Zone.cpp
@@ -370,6 +370,21 @@ Zone::fixupAfterMovingGC()
fixupInitialShapeTable();
}
+bool
+Zone::addTypeDescrObject(JSContext* cx, HandleObject obj)
+{
+ // Type descriptor objects are always tenured so we don't need post barriers
+ // on the set.
+ MOZ_ASSERT(!IsInsideNursery(obj));
+
+ if (!typeDescrObjects.put(obj)) {
+ ReportOutOfMemory(cx);
+ return false;
+ }
+
+ return true;
+}
+
ZoneList::ZoneList()
: head(nullptr), tail(nullptr)
{}
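The new Zone::addTypeDescrObject centralizes insertion into typeDescrObjects and reports OOM itself. A hypothetical call site could look like the sketch below (illustrative only; it assumes the usual SpiderMonkey internal headers and a descriptor object that is already tenured, as the assertion requires):

#include "gc/Zone.h"        // assumed internal headers for this sketch
#include "jscntxt.h"

static bool
RegisterTypeDescr(JSContext* cx, JS::HandleObject descr)
{
    // addTypeDescrObject asserts the object is not in the nursery and calls
    // ReportOutOfMemory on failure, so callers only propagate false.
    if (!cx->zone()->addTypeDescrObject(cx, descr))
        return false;

    // ... continue initializing the type descriptor ...
    return true;
}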
diff --git a/js/src/gc/Zone.h b/js/src/gc/Zone.h
index 50d06319d..c8520ed55 100644
--- a/js/src/gc/Zone.h
+++ b/js/src/gc/Zone.h
@@ -349,10 +349,17 @@ struct Zone : public JS::shadow::Zone,
// Keep track of all TypeDescr and related objects in this compartment.
// This is used by the GC to trace them all first when compacting, since the
// TypedObject trace hook may access these objects.
- using TypeDescrObjectSet = js::GCHashSet<js::HeapPtr<JSObject*>,
- js::MovableCellHasher<js::HeapPtr<JSObject*>>,
+ //
+ // There are no barriers here - the set contains only tenured objects so no
+ // post-barrier is required, and these are weak references so no pre-barrier
+ // is required.
+ using TypeDescrObjectSet = js::GCHashSet<JSObject*,
+ js::MovableCellHasher<JSObject*>,
js::SystemAllocPolicy>;
JS::WeakCache<TypeDescrObjectSet> typeDescrObjects;
+
+ bool addTypeDescrObject(JSContext* cx, HandleObject obj);
// Malloc counter to measure memory pressure for GC scheduling. It runs from