/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*-
 * vim: set ts=8 sts=2 et sw=2 tw=80:
 * This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */

#ifndef gc_GCMarker_h
#define gc_GCMarker_h

#include "mozilla/Variant.h"
#include "mozilla/XorShift128PlusRNG.h"

#include "gc/Barrier.h"
#include "js/HashTable.h"
#include "js/TracingAPI.h"
#include "js/TypeDecls.h"
#include "threading/ProtectedData.h"

class JSRope;

namespace JS {
class SliceBudget;
}

namespace js {

class GCMarker;
class WeakMapBase;

#ifdef DEBUG
// Force stack resizing to ensure OOM test coverage in debug builds.
static const size_t MARK_STACK_BASE_CAPACITY = 4;
#else
static const size_t MARK_STACK_BASE_CAPACITY = 4096;
#endif

enum class SlotsOrElementsKind {
  Unused = 0,  // Must match SlotsOrElementsRangeTag
  Elements,
  FixedSlots,
  DynamicSlots
};

namespace gc {

enum IncrementalProgress { NotFinished = 0, Finished };

class AutoSetMarkColor;
class AutoUpdateMarkStackRanges;
struct Cell;
class MarkStackIter;
class ParallelMarkTask;
class UnmarkGrayTracer;

// Ephemerons are edges from a source to a target that are only materialized
// into a table when the owner is marked. (The owner is something like a
// WeakMap, which contains a set of ephemerons each going from a WeakMap key
// to its value.) When marking an ephemeron, only the color of the owner is
// needed: the target is marked with the minimum (least-marked) color of the
// owner and source. So an EphemeronEdge need store only the owner color and
// the target pointer, which can fit into a tagged pointer since targets are
// aligned Cells.
//
// Note: if the owner's color changes, new EphemeronEdges will be created for
// it.
class EphemeronEdge {
  static constexpr uintptr_t ColorMask = 0x3;
  static_assert(uintptr_t(MarkColor::Gray) <= ColorMask);
  static_assert(uintptr_t(MarkColor::Black) <= ColorMask);
  static_assert(ColorMask < CellAlignBytes);

  uintptr_t taggedTarget;

 public:
  EphemeronEdge(MarkColor color, Cell* cell)
      : taggedTarget(uintptr_t(cell) | uintptr_t(color)) {
    MOZ_ASSERT((uintptr_t(cell) & ColorMask) == 0);
  }

  MarkColor color() const { return MarkColor(taggedTarget & ColorMask); }
  Cell* target() const {
    return reinterpret_cast<Cell*>(taggedTarget & ~ColorMask);
  }
};

using EphemeronEdgeVector = Vector<EphemeronEdge, 16, js::SystemAllocPolicy>;

using EphemeronEdgeTable =
    HashMap<Cell*, EphemeronEdgeVector, DefaultHasher<Cell*>,
            js::SystemAllocPolicy>;

/*
 * The mark stack. Pointers in this stack are "gray" in the GC sense, but
 * their references may be marked either black or gray (in the CC sense).
 *
 * When the mark stack is full, the GC does not call js::TraceChildren to mark
 * the reachable "children" of the thing. Rather, the thing is put aside and
 * js::TraceChildren is called later when the mark stack is empty.
 *
 * To implement such delayed marking of the children with minimal overhead for
 * the normal case of sufficient stack, we link arenas into a list using
 * Arena::setNextDelayedMarkingArena(). The head of the list is stored in
 * GCMarker::delayedMarkingList. GCMarker::delayMarkingChildren() adds arenas
 * to the list as necessary while markAllDelayedChildren() pops the arenas
 * from the list until it is empty.
 */
class MarkStack {
 public:
  /*
   * We use a common mark stack to mark GC things of different types and use
   * the explicit tags to distinguish them when it cannot be deduced from
   * the context of push or pop operation.
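   *
   * For example, pushing a JSObject* conceptually stores a single word that
   * combines the pointer with ObjectTag in its low bits (an illustrative
   * sketch of the encoding, not the literal implementation):
   *
   *   uintptr_t word = uintptr_t(obj) | uintptr_t(ObjectTag);
   *   Tag tag = Tag(word & TagMask);                       // recover the tag
   *   auto* cell = reinterpret_cast<Cell*>(word & ~TagMask);  // the pointer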
   */
  enum Tag {
    SlotsOrElementsRangeTag = 0,  // Must match SlotsOrElementsKind::Unused.
    ObjectTag,
    SymbolTag,
    JitCodeTag,
    ScriptTag,
    TempRopeTag,

    LastTag = TempRopeTag
  };

  static const uintptr_t TagMask = 7;
  static_assert(TagMask >= uintptr_t(LastTag),
                "The tag mask must subsume the tags.");
  static_assert(TagMask <= gc::CellAlignMask,
                "The tag mask must be embeddable in a Cell*.");

  class TaggedPtr {
    uintptr_t bits;

    Cell* ptr() const;

    explicit TaggedPtr(uintptr_t bits);

   public:
    TaggedPtr(Tag tag, Cell* ptr);
    static TaggedPtr fromBits(uintptr_t bits);

    uintptr_t asBits() const;
    Tag tag() const;
    template <typename T>
    T* as() const;

    JSObject* asRangeObject() const;
    JSRope* asTempRope() const;

    void assertValid() const;
  };

  class SlotsOrElementsRange {
    uintptr_t startAndKind_;
    TaggedPtr ptr_;

    static constexpr size_t StartShift = 2;
    static constexpr size_t KindMask = (1 << StartShift) - 1;

    SlotsOrElementsRange(uintptr_t startAndKind, uintptr_t ptr);

   public:
    SlotsOrElementsRange(SlotsOrElementsKind kind, JSObject* obj, size_t start);
    static SlotsOrElementsRange fromBits(uintptr_t startAndKind, uintptr_t ptr);

    void assertValid() const;

    uintptr_t asBits0() const;
    uintptr_t asBits1() const;

    SlotsOrElementsKind kind() const;
    size_t start() const;
    TaggedPtr ptr() const;

    void setStart(size_t newStart);
    void setEmpty();
  };

  MarkStack();
  ~MarkStack();

  MarkStack(const MarkStack& other) = delete;
  MarkStack& operator=(const MarkStack& other) = delete;

  void swap(MarkStack& other);

  // The unit for capacity is mark stack words.
  size_t capacity() const { return capacity_; }

#ifdef JS_GC_ZEAL
  void setMaxCapacity(size_t maxCapacity);
#endif

  size_t position() const { return topIndex_; }

  [[nodiscard]] bool init();
  [[nodiscard]] bool resetStackCapacity();

  template <typename T>
  [[nodiscard]] bool push(T* ptr);
  void infalliblePush(const SlotsOrElementsRange& range);
  void infalliblePush(JSObject* obj, SlotsOrElementsKind kind, size_t start);

  [[nodiscard]] bool push(const TaggedPtr& ptr);
  void infalliblePush(const TaggedPtr& ptr);

  // GCMarker::eagerlyMarkChildren uses the unused mark stack as temporary
  // storage to hold rope pointers.
  [[nodiscard]] bool pushTempRope(JSRope* rope);

  bool isEmpty() const { return position() == 0; }
  bool hasEntries() const { return !isEmpty(); }

  Tag peekTag() const;
  TaggedPtr popPtr();
  SlotsOrElementsRange popSlotsOrElementsRange();

  void clearAndResetCapacity();
  void clearAndFreeStack();

  void poisonUnused();

  [[nodiscard]] bool ensureSpace(size_t count);

  static size_t moveWork(GCMarker* marker, MarkStack& dst, MarkStack& src,
                         bool allowDistribute);

  size_t sizeOfExcludingThis(mozilla::MallocSizeOf mallocSizeOf) const;

 private:
  uintptr_t at(size_t index) const {
    MOZ_ASSERT(topIndex_ <= capacity_);
    MOZ_ASSERT(index < topIndex_);
    return stack_[index];
  }

  uintptr_t* ptr(size_t index) {
    MOZ_ASSERT(topIndex_ <= capacity_);
    MOZ_ASSERT(index <= topIndex_);
    return stack_ + index;
  }

  // Return a pointer to the first unused word beyond the top of the stack.
  uintptr_t* end() { return ptr(topIndex_); }

  // Grow the stack, ensuring there is space to push |count| more words.
  [[nodiscard]] bool enlarge(size_t count);

  [[nodiscard]] bool resize(size_t newCapacity);

  TaggedPtr peekPtr() const;
  [[nodiscard]] bool pushTaggedPtr(Tag tag, Cell* ptr);

  bool indexIsEntryBase(size_t index) const;

  // Area of memory containing the stack.
  MainThreadOrGCTaskData<uintptr_t*> stack_;

  // Size of the stack in words.
  MainThreadOrGCTaskData<size_t> capacity_;

  // Index of the top of the stack.
  MainThreadOrGCTaskData<size_t> topIndex_;
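
  // Layout note (descriptive; see SlotsOrElementsRange and the static_assert
  // following this class): most entries occupy a single stack word holding a
  // TaggedPtr, but a SlotsOrElementsRange occupies two consecutive words
  // (asBits0/asBits1), which is why indexIsEntryBase() is needed when
  // iterating over or splitting the stack.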
#ifdef JS_GC_ZEAL
  // The maximum stack capacity to grow to.
  MainThreadOrGCTaskData<size_t> maxCapacity_{SIZE_MAX};
#endif

#ifdef DEBUG
  MainThreadOrGCTaskData<bool> elementsRangesAreValid;
  friend class js::GCMarker;
#endif

  friend class MarkStackIter;
};

static_assert(unsigned(SlotsOrElementsKind::Unused) ==
                  unsigned(MarkStack::SlotsOrElementsRangeTag),
              "To split the mark stack we depend on being able to tell the "
              "difference between SlotsOrElementsRange::startAndKind_ and a "
              "tagged SlotsOrElementsRange");

class MOZ_STACK_CLASS MarkStackIter {
  MarkStack& stack_;
  size_t pos_;

 public:
  explicit MarkStackIter(MarkStack& stack);

  bool done() const;
  void next();

  MarkStack::Tag peekTag() const;
  MarkStack::TaggedPtr peekPtr() const;
  bool isSlotsOrElementsRange() const;
  MarkStack::SlotsOrElementsRange slotsOrElementsRange() const;
  void setSlotsOrElementsRange(const MarkStack::SlotsOrElementsRange& range);

 private:
  size_t position() const;
};

// Bitmask of options to parameterize MarkingTracerT.
namespace MarkingOptions {
enum : uint32_t {
  None = 0,

  // Set the compartment's hasMarkedCells flag for roots.
  MarkRootCompartments = 1,

  // The marking tracer is operating in parallel. Use appropriate atomic
  // accesses to update the mark bits correctly.
  ParallelMarking = 2,

  // Mark any implicit edges if we are in weak marking mode.
  MarkImplicitEdges = 4,
};
}  // namespace MarkingOptions

// A default set of marking options that works during normal marking and weak
// marking modes. Used for barriers and testing code.
constexpr uint32_t NormalMarkingOptions = MarkingOptions::MarkImplicitEdges;

template <uint32_t markingOptions>
class MarkingTracerT
    : public GenericTracerImpl<MarkingTracerT<markingOptions>> {
 public:
  MarkingTracerT(JSRuntime* runtime, GCMarker* marker);
  virtual ~MarkingTracerT() = default;

  template <typename T>
  void onEdge(T** thingp, const char* name);
  friend class GenericTracerImpl<MarkingTracerT<markingOptions>>;

  GCMarker* getMarker();
};

using MarkingTracer = MarkingTracerT<MarkingOptions::None>;
using RootMarkingTracer = MarkingTracerT<MarkingOptions::MarkRootCompartments>;
using WeakMarkingTracer = MarkingTracerT<MarkingOptions::MarkImplicitEdges>;
using ParallelMarkingTracer = MarkingTracerT<MarkingOptions::ParallelMarking>;

enum ShouldReportMarkTime : bool {
  ReportMarkTime = true,
  DontReportMarkTime = false
};

} /* namespace gc */

class GCMarker {
  enum MarkingState : uint8_t {
    // Have not yet started marking.
    NotActive,

    // Root marking mode. This sets the hasMarkedCells flag on compartments
    // containing objects and scripts, which is used to make sure we clean up
    // dead compartments.
    RootMarking,

    // Main marking mode. Weakmap marking will be populating the
    // gcEphemeronEdges tables but not consulting them. The state will
    // transition to WeakMarking until it is done, then back to
    // RegularMarking.
    RegularMarking,

    // Like RegularMarking but with multiple threads running in parallel.
    ParallelMarking,

    // Same as RegularMarking except now every marked obj/script is
    // immediately looked up in the gcEphemeronEdges table to find edges
    // generated by weakmap keys, and those edges are traversed to their
    // values. Transitions back to RegularMarking when done.
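    // (enterWeakMarkingMode() and leaveWeakMarkingMode() below switch into
    // and out of this state.)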
    WeakMarking,
  };

 public:
  explicit GCMarker(JSRuntime* rt);
  [[nodiscard]] bool init();

  JSRuntime* runtime() { return runtime_; }
  JSTracer* tracer() {
    return tracer_.match([](auto& t) -> JSTracer* { return &t; });
  }

#ifdef JS_GC_ZEAL
  void setMaxCapacity(size_t maxCap) { stack.setMaxCapacity(maxCap); }
#endif

  bool isActive() const { return state != NotActive; }
  bool isRegularMarking() const { return state == RegularMarking; }
  bool isParallelMarking() const { return state == ParallelMarking; }
  bool isWeakMarking() const { return state == WeakMarking; }

  gc::MarkColor markColor() const { return markColor_; }

  bool isDrained() const { return stack.isEmpty() && otherStack.isEmpty(); }

  bool hasEntriesForCurrentColor() { return stack.hasEntries(); }
  bool hasBlackEntries() const { return hasEntries(gc::MarkColor::Black); }
  bool hasGrayEntries() const { return hasEntries(gc::MarkColor::Gray); }
  bool hasEntries(gc::MarkColor color) const;

  bool canDonateWork() const;
  bool shouldDonateWork() const;

  void start();
  void stop();
  void reset();

  [[nodiscard]] bool markUntilBudgetExhausted(
      JS::SliceBudget& budget,
      gc::ShouldReportMarkTime reportTime = gc::ReportMarkTime);

  void setRootMarkingMode(bool newState);

  bool enterWeakMarkingMode();
  void leaveWeakMarkingMode();

  void enterParallelMarkingMode();
  void leaveParallelMarkingMode();

  // Do not use linear-time weak marking for the rest of this collection.
  // Currently, this will only be triggered by an OOM when updating needed
  // data structures.
  void abortLinearWeakMarking();

#ifdef DEBUG
  // We can't check atom marking if the helper thread lock is already held by
  // the current thread. This allows us to disable the check.
  void setCheckAtomMarking(bool check);

  bool shouldCheckCompartments() { return strictCompartmentChecking; }

  bool markOneObjectForTest(JSObject* obj);
#endif

  bool markCurrentColorInParallel(gc::ParallelMarkTask* task,
                                  JS::SliceBudget& budget);

  template <uint32_t markingOptions, gc::MarkColor>
  bool markOneColor(JS::SliceBudget& budget);

  static size_t moveWork(GCMarker* dst, GCMarker* src, bool allowDistribute);

  [[nodiscard]] bool initStack();
  void resetStackCapacity();
  void freeStack();

  size_t sizeOfIncludingThis(mozilla::MallocSizeOf mallocSizeOf) const;

  static GCMarker* fromTracer(JSTracer* trc) {
    MOZ_ASSERT(trc->isMarkingTracer());
    auto* marker = reinterpret_cast<GCMarker*>(uintptr_t(trc) -
                                               offsetof(GCMarker, tracer_));
    MOZ_ASSERT(marker->tracer() == trc);
    return marker;
  }

  // Internal public methods, for ease of use by the rest of the GC:

  // If |thing| is unmarked, mark it and then traverse its children.
  template <uint32_t markingOptions, typename T>
  void markAndTraverse(T* thing);

  template <typename T>
  void markImplicitEdges(T* markedThing);

 private:
  /*
   * Care must be taken changing the mark color from gray to black. The cycle
   * collector depends on the invariant that there are no black to gray edges
   * in the GC heap. This invariant lets the CC not trace through black
   * objects. If this invariant is violated, the cycle collector may free
   * objects that are still reachable.
   */
  void setMarkColor(gc::MarkColor newColor);
  friend class js::gc::AutoSetMarkColor;

  template <typename Tracer>
  void setMarkingStateAndTracer(MarkingState prev, MarkingState next);

  // The mutator can shift object elements which could invalidate any elements
  // index on the mark stack. Change the index to be relative to the elements
  // allocation (to ignore shifted elements) while the mutator is running.
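  // Illustrative sketch of the intended adjustment (assumed; the helper name
  // numShiftedElements() is used here for exposition only):
  //
  //   allocationRelativeIndex == objectRelativeIndex + numShiftedElements()
  //
  // updateRangesAtEndOfSlice() rebases saved indices one way before the
  // mutator runs and updateRangesAtStartOfSlice() rebases them back.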
  void updateRangesAtStartOfSlice();
  void updateRangesAtEndOfSlice();
  friend class gc::AutoUpdateMarkStackRanges;

  template <uint32_t markingOptions>
  bool processMarkStackTop(JS::SliceBudget& budget);
  friend class gc::GCRuntime;

  // Helper methods that coerce their second argument to the base pointer
  // type.
  template <uint32_t markingOptions, typename S>
  void markAndTraverseObjectEdge(S source, JSObject* target) {
    markAndTraverseEdge<markingOptions>(source, target);
  }
  template <uint32_t markingOptions, typename S>
  void markAndTraverseStringEdge(S source, JSString* target) {
    markAndTraverseEdge<markingOptions>(source, target);
  }

  template <uint32_t markingOptions, typename S, typename T>
  void markAndTraverseEdge(S* source, T* target);
  template <uint32_t markingOptions, typename S, typename T>
  void markAndTraverseEdge(S* source, const T& target);

  template <uint32_t markingOptions>
  bool markAndTraversePrivateGCThing(JSObject* source, gc::Cell* target);

  template <uint32_t markingOptions>
  bool markAndTraverseSymbol(JSObject* source, JS::Symbol* target);

  template <typename S, typename T>
  void checkTraversedEdge(S source, T* target);

  // Mark the given GC thing, but do not trace its children. Return true
  // if the thing became marked.
  template <uint32_t markingOptions, typename T>
  [[nodiscard]] bool mark(T* thing);

  // Traverse a GC thing's children, using a strategy depending on the type.
  // This can either process them immediately or push them onto the mark
  // stack for later.
#define DEFINE_TRAVERSE_METHOD(_1, Type, _2, _3) \
  template <uint32_t markingOptions>             \
  void traverse(Type* thing);
  JS_FOR_EACH_TRACEKIND(DEFINE_TRAVERSE_METHOD)
#undef DEFINE_TRAVERSE_METHOD

  // Process a marked thing's children by calling T::traceChildren().
  template <uint32_t markingOptions, typename T>
  void traceChildren(T* thing);

  // Process a marked thing's children recursively using an iterative loop and
  // manual dispatch, for kinds where this is possible.
  template <uint32_t markingOptions, typename T>
  void scanChildren(T* thing);

  // Push a marked thing onto the mark stack. Its children will be marked
  // later.
  template <uint32_t markingOptions, typename T>
  void pushThing(T* thing);

  template <uint32_t markingOptions>
  void eagerlyMarkChildren(JSLinearString* str);
  template <uint32_t markingOptions>
  void eagerlyMarkChildren(JSRope* rope);
  template <uint32_t markingOptions>
  void eagerlyMarkChildren(JSString* str);
  template <uint32_t markingOptions>
  void eagerlyMarkChildren(Shape* shape);
  template <uint32_t markingOptions>
  void eagerlyMarkChildren(PropMap* map);
  template <uint32_t markingOptions>
  void eagerlyMarkChildren(Scope* scope);

  template <typename T>
  inline void pushTaggedPtr(T* ptr);

  inline void pushValueRange(JSObject* obj, SlotsOrElementsKind kind,
                             size_t start, size_t end);

  // Mark through edges whose target color depends on the colors of two
  // source entities (e.g. a WeakMap and one of its keys), and push the
  // target onto the mark stack.
  void markEphemeronEdges(gc::EphemeronEdgeVector& edges,
                          gc::MarkColor srcColor);
  friend class JS::Zone;

#ifdef DEBUG
  void checkZone(gc::Cell* cell);
#else
  void checkZone(gc::Cell* cell) {}
#endif

  template <uint32_t markingOptions>
  bool doMarking(JS::SliceBudget& budget, gc::ShouldReportMarkTime reportTime);

  void delayMarkingChildrenOnOOM(gc::Cell* cell);

  /*
   * The JSTracer used for marking. This can change depending on the current
   * state.
   */
  mozilla::Variant<gc::MarkingTracer, gc::RootMarkingTracer,
                   gc::WeakMarkingTracer, gc::ParallelMarkingTracer>
      tracer_;

  JSRuntime* const runtime_;

  // The main mark stack, holding entries of color |markColor_|.
  gc::MarkStack stack;

  // The auxiliary mark stack, which may contain entries of the other color.
  gc::MarkStack otherStack;

  // Track whether we're using the main or auxiliary stack.
  MainThreadOrGCTaskData<bool> haveSwappedStacks;

  // The current mark stack color.
  MainThreadOrGCTaskData<gc::MarkColor> markColor_;

  Vector<JS::GCCellPtr, 0, js::SystemAllocPolicy> unmarkGrayStack;
  friend class gc::UnmarkGrayTracer;

  /* Track the state of marking. */
  MainThreadOrGCTaskData<MarkingState> state;

  /* Whether we successfully added all edges to the implicit edges table. */
  MainThreadOrGCTaskData<bool> haveAllImplicitEdges;

 public:
  /*
   * Whether weakmaps can be marked incrementally.
   *
   * JSGC_INCREMENTAL_WEAKMAP_ENABLED
   * pref: javascript.options.mem.incremental_weakmap
   */
  MainThreadOrGCTaskData<bool> incrementalWeakMapMarkingEnabled;

  /* Random number generator state. */
  MainThreadOrGCTaskData<mozilla::non_crypto::XorShift128PlusRNG> random;

#ifdef DEBUG
 private:
  /* Assert that start and stop are called with correct ordering. */
  MainThreadOrGCTaskData<bool> started;

  /*
   * Whether to check that atoms traversed are present in atom marking
   * bitmap.
   */
  MainThreadOrGCTaskData<bool> checkAtomMarking;

  /*
   * If this is true, all marked objects must belong to a compartment being
   * GCed. This is used to look for compartment bugs.
   */
  MainThreadOrGCTaskData<bool> strictCompartmentChecking;

 public:
  /*
   * The compartment and zone of the object whose trace hook is currently
   * being called, if any. Used to catch cross-compartment edges traced
   * without use of TraceCrossCompartmentEdge.
   */
  MainThreadOrGCTaskData<JS::Compartment*> tracingCompartment;
  MainThreadOrGCTaskData<JS::Zone*> tracingZone;
#endif  // DEBUG
};

namespace gc {

/*
 * Temporarily change the mark color while this class is on the stack.
 *
 * During incremental sweeping this also transitions zones in the
 * current sweep group into the Mark or MarkGray state as appropriate.
 */
class MOZ_RAII AutoSetMarkColor {
  GCMarker& marker_;
  MarkColor initialColor_;

 public:
  AutoSetMarkColor(GCMarker& marker, MarkColor newColor)
      : marker_(marker), initialColor_(marker.markColor()) {
    marker_.setMarkColor(newColor);
  }

  AutoSetMarkColor(GCMarker& marker, CellColor newColor)
      : AutoSetMarkColor(marker, AsMarkColor(newColor)) {}

  ~AutoSetMarkColor() { marker_.setMarkColor(initialColor_); }
};

} /* namespace gc */

} /* namespace js */

#endif /* gc_GCMarker_h */