#pragma once
#include "ArrayBuffer.h"
#include "CellState.h"
#include "CollectionScope.h"
#include "DeleteAllCodeEffort.h"
#include "GCIncomingRefCountedSet.h"
#include "HandleSet.h"
#include "HandleStack.h"
#include "HeapObserver.h"
#include "ListableHandler.h"
#include "MachineStackMarker.h"
#include "MarkedBlock.h"
#include "MarkedBlockSet.h"
#include "MarkedSpace.h"
#include "MutatorState.h"
#include "Options.h"
#include "StructureIDTable.h"
#include "TinyBloomFilter.h"
#include "UnconditionalFinalizer.h"
#include "VisitRaceKey.h"
#include "WeakHandleOwner.h"
#include "WeakReferenceHarvester.h"
#include "WriteBarrierSupport.h"
#include <wtf/AutomaticThread.h>
#include <wtf/Deque.h>
#include <wtf/HashCountedSet.h>
#include <wtf/HashSet.h>
#include <wtf/ParallelHelperPool.h>
namespace JSC {
class CodeBlock;
class CodeBlockSet;
class GCDeferralContext;
class EdenGCActivityCallback;
class ExecutableBase;
class FullGCActivityCallback;
class GCActivityCallback;
class GCAwareJITStubRoutine;
class Heap;
class HeapProfiler;
class HeapVerifier;
class HelpingGCScope;
class IncrementalSweeper;
class JITStubRoutine;
class JITStubRoutineSet;
class JSCell;
class JSValue;
class LLIntOffsetsExtractor;
class MarkStackArray;
class MarkedAllocator;
class MarkedArgumentBuffer;
class MarkingConstraint;
class MarkingConstraintSet;
class MutatorScheduler;
class SlotVisitor;
class SpaceTimeMutatorScheduler;
class StopIfNecessaryTimer;
class VM;
namespace DFG {
class SpeculativeJIT;
class Worklist;
}
// Poison pattern written over dead cells (see Heap::zombifyDeadObjects());
// Heap::isZombified() tests a cell's first word against it.
// C++17 inline variable: one definition shared by all translation units that
// include this header. The previous namespace-scope `static` gave each TU its
// own internal-linkage copy; value comparisons still behaved identically, but
// a single definition avoids the duplication.
inline void* const zombifiedBits = reinterpret_cast<void*>(static_cast<uintptr_t>(0xdeadbeef));
// Counted sets rather than plain sets: Heap::protect() may be called multiple
// times for the same value, so each entry carries a count and the value stays
// rooted until matched by the same number of unprotect() calls. TypeCountSet
// keys are class-name strings (used by the *TypeCounts() statistics methods).
// Modernized from `typedef` to alias declarations; the file already relies on
// C++17 (std::optional), so `using` is safe and more idiomatic.
using ProtectCountSet = HashCountedSet<JSCell*>;
using TypeCountSet = HashCountedSet<const char*>;

// Initial heap sizing policy passed to Heap's constructor; presumably selects
// growth/trigger heuristics — confirm against the Heap implementation.
enum HeapType { SmallHeap, LargeHeap };
class HeapUtil;
// Heap is the garbage-collected heap for a single JSC VM. It owns the
// MarkedSpace cells are allocated from, the machine-thread registry used for
// conservative stack scanning, the write-barrier state that JIT-compiled code
// reads directly (see addressOfMutatorShouldBeFenced() /
// addressOfBarrierThreshold()), and the machinery that requests, schedules and
// runs eden and full collections on a dedicated collector thread (m_thread).
// NOTE(review): this is a declaration-only view — behavioral claims about the
// collection algorithm itself should be confirmed against the implementation.
class Heap {
WTF_MAKE_NONCOPYABLE(Heap);
public:
friend class JIT;
friend class DFG::SpeculativeJIT;
// Map a value/cell back to the Heap that owns it.
static Heap* heap(const JSValue); static Heap* heap(const HeapCell*);
static const unsigned s_timeCheckResolution = 16;
// Mark-bit queries on arbitrary cell pointers. The "Concurrently" variant is
// presumably callable while marking races with the mutator — TODO confirm.
static bool isMarked(const void*);
static bool isMarkedConcurrently(const void*);
static bool testAndSetMarked(HeapVersion, const void*);
static size_t cellSize(const void*);
// Write barriers. The JITs emit inline fast paths against barrierThreshold()
// and call writeBarrierSlowPath() when the fast path fails.
void writeBarrier(const JSCell* from);
void writeBarrier(const JSCell* from, JSValue to);
void writeBarrier(const JSCell* from, JSCell* to);
void writeBarrierWithoutFence(const JSCell* from);
void mutatorFence();
JS_EXPORT_PRIVATE void writeBarrierSlowPath(const JSCell* from);
Heap(VM*, HeapType);
~Heap();
// Called as the VM shuts down; last opportunity to run finalizers.
void lastChanceToFinalize();
void releaseDelayedReleasedObjects();
VM* vm() const;
MarkedSpace& objectSpace() { return m_objectSpace; }
MachineThreads& machineThreads() { return m_machineThreads; }
SlotVisitor& collectorSlotVisitor() { return *m_collectorSlotVisitor; }
// Timer-driven GC triggers (full/eden) and the incremental sweeper.
JS_EXPORT_PRIVATE GCActivityCallback* fullActivityCallback();
JS_EXPORT_PRIVATE GCActivityCallback* edenActivityCallback();
JS_EXPORT_PRIVATE void setGarbageCollectionTimerEnabled(bool);
JS_EXPORT_PRIVATE IncrementalSweeper* sweeper();
// Observers are stored in an unsorted Vector: O(1) add, O(n) remove.
void addObserver(HeapObserver* observer) { m_observers.append(observer); }
void removeObserver(HeapObserver* observer) { m_observers.removeFirst(observer); }
MutatorState mutatorState() const { return m_mutatorState; }
// Scope of the in-progress collection, or nullopt when not collecting.
std::optional<CollectionScope> collectionScope() const { return m_collectionScope; }
bool hasHeapAccess() const;
bool mutatorIsStopped() const;
bool collectorBelievesThatTheWorldIsStopped() const;
JS_EXPORT_PRIVATE bool isCurrentThreadBusy();
// Per-cell finalizers, implemented with weak handles owned by FinalizerOwner
// (declared below).
typedef void (*Finalizer)(JSCell*);
JS_EXPORT_PRIVATE void addFinalizer(JSCell*, Finalizer);
void addExecutable(ExecutableBase*);
// Collection stays disabled until the VM finishes bootstrapping; this enables it.
void notifyIsSafeToCollect();
bool isSafeToCollect() const { return m_isSafeToCollect; }
JS_EXPORT_PRIVATE bool isHeapSnapshotting() const;
// Collection entry points. A nullopt scope lets the heap choose between eden
// and full collection (see shouldDoFullCollection() below).
JS_EXPORT_PRIVATE void collectAllGarbageIfNotDoneRecently();
JS_EXPORT_PRIVATE void collectAllGarbage();
bool shouldCollectHeuristic();
JS_EXPORT_PRIVATE void collectAsync(std::optional<CollectionScope> = std::nullopt);
JS_EXPORT_PRIVATE void collectSync(std::optional<CollectionScope> = std::nullopt);
void collectIfNecessaryOrDefer(GCDeferralContext* = nullptr);
void completeAllJITPlans();
// Accounting for memory owned by cells but allocated outside the heap
// (e.g. ArrayBuffer backing stores); feeds the collection heuristics.
void reportExtraMemoryAllocated(size_t);
JS_EXPORT_PRIVATE void reportExtraMemoryVisited(size_t);
#if ENABLE(RESOURCE_USAGE)
JS_EXPORT_PRIVATE void reportExternalMemoryVisited(size_t);
size_t externalMemorySize() { return m_externalMemorySize; }
#endif
void deprecatedReportExtraMemory(size_t);
JS_EXPORT_PRIVATE void reportAbandonedObjectGraph();
// Counted GC roots (m_protectedValues): a value may be protected multiple
// times and stays rooted until unprotect() balances every protect().
JS_EXPORT_PRIVATE void protect(JSValue);
JS_EXPORT_PRIVATE bool unprotect(JSValue);
// Introspection / statistics.
JS_EXPORT_PRIVATE size_t extraMemorySize(); JS_EXPORT_PRIVATE size_t size();
JS_EXPORT_PRIVATE size_t capacity();
JS_EXPORT_PRIVATE size_t objectCount();
JS_EXPORT_PRIVATE size_t globalObjectCount();
JS_EXPORT_PRIVATE size_t protectedObjectCount();
JS_EXPORT_PRIVATE size_t protectedGlobalObjectCount();
JS_EXPORT_PRIVATE std::unique_ptr<TypeCountSet> protectedObjectTypeCounts();
JS_EXPORT_PRIVATE std::unique_ptr<TypeCountSet> objectTypeCounts();
HashSet<MarkedArgumentBuffer*>& markListSet();
template<typename Functor> void forEachProtectedCell(const Functor&);
template<typename Functor> void forEachCodeBlock(const Functor&);
template<typename Functor> void forEachCodeBlockIgnoringJITPlans(const Functor&);
HandleSet* handleSet() { return &m_handleSet; }
HandleStack* handleStack() { return &m_handleStack; }
void willStartIterating();
void didFinishIterating();
// Timings and sizes recorded around the most recent collections.
double lastFullGCLength() const { return m_lastFullGCLength; }
double lastEdenGCLength() const { return m_lastEdenGCLength; }
void increaseLastFullGCLength(double amount) { m_lastFullGCLength += amount; }
size_t sizeBeforeLastEdenCollection() const { return m_sizeBeforeLastEdenCollect; }
size_t sizeAfterLastEdenCollection() const { return m_sizeAfterLastEdenCollect; }
size_t sizeBeforeLastFullCollection() const { return m_sizeBeforeLastFullCollect; }
size_t sizeAfterLastFullCollection() const { return m_sizeAfterLastFullCollect; }
void deleteAllCodeBlocks(DeleteAllCodeEffort);
void deleteAllUnlinkedCodeBlocks(DeleteAllCodeEffort);
void didAllocate(size_t);
bool isPagedOut(double deadline);
const JITStubRoutineSet& jitStubRoutines() { return *m_jitStubRoutines; }
void addReference(JSCell*, ArrayBuffer*);
// While deferred (m_deferralDepth > 0) collection requests are postponed;
// see the DeferGC/DeferGCForAWhile friends below.
bool isDeferred() const { return !!m_deferralDepth; }
StructureIDTable& structureIDTable() { return m_structureIDTable; }
CodeBlockSet& codeBlockSet() { return *m_codeBlocks; }
#if USE(FOUNDATION)
template<typename T> void releaseSoon(RetainPtr<T>&&);
#endif
// A zombified (dead, poisoned) cell has zombifiedBits written over its first word.
static bool isZombified(JSCell* cell) { return *(void**)cell == zombifiedBits; }
// WeakGCMaps register a pruning callback so their stale entries can be
// dropped during collection (see pruneStaleEntriesFromWeakGCMaps()).
JS_EXPORT_PRIVATE void registerWeakGCMap(void* weakGCMap, std::function<void()> pruningCallback);
JS_EXPORT_PRIVATE void unregisterWeakGCMap(void* weakGCMap);
void addLogicallyEmptyWeakBlock(WeakBlock*);
#if ENABLE(RESOURCE_USAGE)
size_t blockBytesAllocated() const { return m_blockBytesAllocated; }
#endif
void didAllocateBlock(size_t capacity);
void didFreeBlock(size_t capacity);
// Barrier state read directly by JIT code through the addressOf* accessors;
// the member types behind these must not change casually.
bool mutatorShouldBeFenced() const { return m_mutatorShouldBeFenced; }
const bool* addressOfMutatorShouldBeFenced() const { return &m_mutatorShouldBeFenced; }
unsigned barrierThreshold() const { return m_barrierThreshold; }
const unsigned* addressOfBarrierThreshold() const { return &m_barrierThreshold; }
// Heap-access protocol: a thread must hold access to touch the heap and must
// poll stopIfNecessary() so the collector can stop it at safepoints.
bool hasAccess() const;
void acquireAccess();
void releaseAccess();
void stopIfNecessary();
bool mayNeedToStop();
// Temporarily forbid collection entirely; paired with allowCollection().
void preventCollection();
void allowCollection();
size_t bytesVisited();
uint64_t mutatorExecutionVersion() const { return m_mutatorExecutionVersion; }
JS_EXPORT_PRIVATE void addMarkingConstraint(std::unique_ptr<MarkingConstraint>);
size_t numOpaqueRoots() const { return m_opaqueRoots.size(); }
#if USE(CF)
CFRunLoopRef runLoop() const { return m_runLoop.get(); }
JS_EXPORT_PRIVATE void setRunLoop(CFRunLoopRef);
#endif // USE(CF)
private:
friend class AllocatingScope;
friend class CodeBlock;
friend class DeferGC;
friend class DeferGCForAWhile;
friend class GCAwareJITStubRoutine;
friend class GCLogging;
friend class GCThread;
friend class HandleSet;
friend class HeapUtil;
friend class HeapVerifier;
friend class HelpingGCScope;
friend class JITStubRoutine;
friend class LLIntOffsetsExtractor;
friend class MarkedSpace;
friend class MarkedAllocator;
friend class MarkedBlock;
friend class SlotVisitor;
friend class SpaceTimeMutatorScheduler;
friend class StochasticSpaceTimeMutatorScheduler;
friend class IncrementalSweeper;
friend class HeapStatistics;
friend class VM;
friend class WeakSet;
// The collector's dedicated automatic thread; defined in the implementation file.
class Thread;
friend class Thread;
static const size_t minExtraMemory = 256;
// Weak-handle owner backing addFinalizer(): runs the registered Finalizer
// (passed via the handle's context) when the target cell dies.
class FinalizerOwner : public WeakHandleOwner {
void finalize(Handle<Unknown>, void* context) override;
};
JS_EXPORT_PRIVATE bool isValidAllocation(size_t);
JS_EXPORT_PRIVATE void reportExtraMemoryAllocatedSlowCase(size_t);
JS_EXPORT_PRIVATE void deprecatedReportExtraMemorySlowCase(size_t);
// --- Collector-thread control and the mutator/collector handshake. ---
bool shouldCollectInThread(const LockHolder&);
void collectInThread();
void stopTheWorld();
void resumeTheWorld();
void stopTheMutator();
void resumeTheMutator();
void stopIfNecessarySlow();
bool stopIfNecessarySlow(unsigned extraStateBits);
template<typename Func>
void waitForCollector(const Func&);
JS_EXPORT_PRIVATE void acquireAccessSlow();
JS_EXPORT_PRIVATE void releaseAccessSlow();
// The unsigned parameters here carry m_worldState bit patterns (see the
// *Bit constants declared with the data members below).
bool handleGCDidJIT(unsigned);
bool handleNeedFinalize(unsigned);
void handleGCDidJIT();
void handleNeedFinalize();
void setGCDidJIT();
void setNeedFinalize();
void waitWhileNeedFinalize();
void setMutatorWaiting();
void clearMutatorWaiting();
void notifyThreadStopping(const LockHolder&);
// Tickets order collection requests: requestCollection() returns a ticket
// and waitForCollection() blocks until that ticket has been served.
typedef uint64_t Ticket;
Ticket requestCollection(std::optional<CollectionScope>);
void waitForCollection(Ticket);
// --- Phases of a collection cycle, roughly in execution order. ---
void suspendCompilerThreads();
void willStartCollection(std::optional<CollectionScope>);
void prepareForMarking();
void markToFixpoint(double gcStartTime);
void gatherStackRoots(ConservativeRoots&);
void gatherJSStackRoots(ConservativeRoots&);
void gatherScratchBufferRoots(ConservativeRoots&);
void beginMarking();
void visitCompilerWorklistWeakReferences();
void removeDeadCompilerWorklistEntries();
void updateObjectCounts(double gcStartTime);
void endMarking();
void reapWeakHandles();
void pruneStaleEntriesFromWeakGCMaps();
void sweepArrayBuffers();
void snapshotUnswept();
void deleteSourceProviderCaches();
void notifyIncrementalSweeper();
void harvestWeakReferences();
void finalizeUnconditionalFinalizers();
void clearUnmarkedExecutables();
void deleteUnmarkedCompiledCode();
JS_EXPORT_PRIVATE void addToRememberedSet(const JSCell*);
void updateAllocationLimits();
void didFinishCollection(double gcStartTime);
void resumeCompilerThreads();
void zombifyDeadObjects();
void gatherExtraHeapSnapshotData(HeapProfiler&);
void removeDeadHeapSnapshotNodes(HeapProfiler&);
void finalize();
void sweepLargeAllocations();
void sweepAllLogicallyEmptyWeakBlocks();
bool sweepNextLogicallyEmptyWeakBlock();
bool shouldDoFullCollection(std::optional<CollectionScope> requestedCollectionScope) const;
// Deferral bookkeeping used by the DeferGC/DeferGCForAWhile friends above.
void incrementDeferralDepth();
void decrementDeferralDepth();
void decrementDeferralDepthAndGCIfNeeded();
JS_EXPORT_PRIVATE void decrementDeferralDepthAndGCIfNeededSlow();
size_t threadVisitCount();
size_t threadBytesVisited();
void forEachCodeBlockImpl(const ScopedLambda<bool(CodeBlock*)>&);
void forEachCodeBlockIgnoringJITPlansImpl(const ScopedLambda<bool(CodeBlock*)>&);
void setMutatorShouldBeFenced(bool value);
void addCoreConstraints();
template<typename Func>
void iterateExecutingAndCompilingCodeBlocks(const Func&);
template<typename Func>
void iterateExecutingAndCompilingCodeBlocksWithoutHoldingLocks(const Func&);
void assertSharedMarkStacksEmpty();
// --- Data members. NOTE(review): LLIntOffsetsExtractor and the JITs are
// friends and take member addresses; do not reorder members casually. ---
const HeapType m_heapType;
const size_t m_ramSize;
const size_t m_minBytesPerCycle;
// Size bookkeeping around eden/full collections, consumed by the heuristics
// (shouldCollectHeuristic(), updateAllocationLimits()).
size_t m_sizeAfterLastCollect;
size_t m_sizeAfterLastFullCollect;
size_t m_sizeBeforeLastFullCollect;
size_t m_sizeAfterLastEdenCollect;
size_t m_sizeBeforeLastEdenCollect;
size_t m_bytesAllocatedThisCycle;
size_t m_bytesAbandonedSinceLastFullCollect;
size_t m_maxEdenSize;
size_t m_maxHeapSize;
bool m_shouldDoFullCollection;
size_t m_totalBytesVisited;
size_t m_totalBytesVisitedThisCycle;
std::optional<CollectionScope> m_collectionScope;
std::optional<CollectionScope> m_lastCollectionScope;
MutatorState m_mutatorState { MutatorState::Running };
StructureIDTable m_structureIDTable;
MarkedSpace m_objectSpace;
GCIncomingRefCountedSet<ArrayBuffer> m_arrayBuffers;
size_t m_extraMemorySize;
size_t m_deprecatedExtraMemorySize;
HashSet<const JSCell*> m_copyingRememberedSet;
ProtectCountSet m_protectedValues;
std::unique_ptr<HashSet<MarkedArgumentBuffer*>> m_markListSet;
MachineThreads m_machineThreads;
// Marking machinery: the collector's own visitor, mutator/race mark stacks,
// and a pool of parallel visitors handed out under m_parallelSlotVisitorLock.
std::unique_ptr<SlotVisitor> m_collectorSlotVisitor;
std::unique_ptr<MarkStackArray> m_mutatorMarkStack;
Lock m_raceMarkStackLock;
std::unique_ptr<MarkStackArray> m_raceMarkStack;
std::unique_ptr<MarkingConstraintSet> m_constraintSet;
Vector<std::unique_ptr<SlotVisitor>> m_parallelSlotVisitors;
Vector<SlotVisitor*> m_availableParallelSlotVisitors;
Lock m_parallelSlotVisitorLock;
template<typename Func>
void forEachSlotVisitor(const Func&);
HandleSet m_handleSet;
HandleStack m_handleStack;
std::unique_ptr<CodeBlockSet> m_codeBlocks;
std::unique_ptr<JITStubRoutineSet> m_jitStubRoutines;
FinalizerOwner m_finalizerOwner;
bool m_isSafeToCollect;
// Read by JIT code via addressOfMutatorShouldBeFenced() /
// addressOfBarrierThreshold(); defaults come from Options.
bool m_mutatorShouldBeFenced { Options::forceFencedBarrier() };
unsigned m_barrierThreshold { Options::forceFencedBarrier() ? tautologicalThreshold : blackThreshold };
VM* m_vm;
double m_lastFullGCLength;
double m_lastEdenGCLength;
Vector<ExecutableBase*> m_executables;
Vector<WeakBlock*> m_logicallyEmptyWeakBlocks;
size_t m_indexOfNextLogicallyEmptyWeakBlockToSweep { WTF::notFound };
#if USE(CF)
RetainPtr<CFRunLoopRef> m_runLoop;
#endif // USE(CF)
RefPtr<FullGCActivityCallback> m_fullActivityCallback;
RefPtr<GCActivityCallback> m_edenActivityCallback;
RefPtr<IncrementalSweeper> m_sweeper;
RefPtr<StopIfNecessaryTimer> m_stopIfNecessaryTimer;
Vector<HeapObserver*> m_observers;
unsigned m_deferralDepth;
bool m_didDeferGCWork { false };
std::unique_ptr<HeapVerifier> m_verifier;
#if USE(FOUNDATION)
Vector<RetainPtr<CFTypeRef>> m_delayedReleaseObjects;
unsigned m_delayedReleaseRecursionCount;
#endif
HashMap<void*, std::function<void()>> m_weakGCMaps;
Lock m_visitRaceLock;
// Shared state for parallel marking, coordinated via m_markingMutex and
// m_markingConditionVariable.
Lock m_markingMutex;
Condition m_markingConditionVariable;
std::unique_ptr<MarkStackArray> m_sharedCollectorMarkStack;
std::unique_ptr<MarkStackArray> m_sharedMutatorMarkStack;
unsigned m_numberOfActiveParallelMarkers { 0 };
unsigned m_numberOfWaitingParallelMarkers { 0 };
bool m_parallelMarkersShouldExit { false };
Lock m_opaqueRootsMutex;
HashSet<const void*> m_opaqueRoots;
static const size_t s_blockFragmentLength = 32;
ListableHandler<WeakReferenceHarvester>::List m_weakReferenceHarvesters;
ListableHandler<UnconditionalFinalizer>::List m_unconditionalFinalizers;
ParallelHelperClient m_helperClient;
#if ENABLE(RESOURCE_USAGE)
size_t m_blockBytesAllocated { 0 };
size_t m_externalMemorySize { 0 };
#endif
std::unique_ptr<MutatorScheduler> m_scheduler;
// Bit layout of m_worldState, the atomic word coordinating the
// mutator/collector handshake (see handleGCDidJIT()/handleNeedFinalize()).
static const unsigned shouldStopBit = 1u << 0u;
static const unsigned stoppedBit = 1u << 1u;
static const unsigned hasAccessBit = 1u << 2u;
static const unsigned gcDidJITBit = 1u << 3u; static const unsigned needFinalizeBit = 1u << 4u;
static const unsigned mutatorWaitingBit = 1u << 5u; Atomic<unsigned> m_worldState;
bool m_collectorBelievesThatTheWorldIsStopped { false };
MonotonicTime m_stopTime;
// Queue of pending collection requests; the ticket counters suggest requests
// are granted on arrival and served in order by the collector thread.
Deque<std::optional<CollectionScope>> m_requests;
Ticket m_lastServedTicket { 0 };
Ticket m_lastGrantedTicket { 0 };
bool m_threadShouldStop { false };
bool m_threadIsStopping { false };
bool m_mutatorDidRun { true };
uint64_t m_mutatorExecutionVersion { 0 };
Box<Lock> m_threadLock;
RefPtr<AutomaticThreadCondition> m_threadCondition; RefPtr<AutomaticThread> m_thread;
// Support for a continuous-collection mode. NOTE(review): presumably driven
// by an Options flag, inferred from the member naming — confirm.
Lock m_collectContinuouslyLock;
Condition m_collectContinuouslyCondition;
bool m_shouldStopCollectingContinuously { false };
ThreadIdentifier m_collectContinuouslyThread { 0 };
MonotonicTime m_lastGCStartTime;
MonotonicTime m_lastGCEndTime;
MonotonicTime m_currentGCStartTime;
uintptr_t m_barriersExecuted { 0 };
};
}