#pragma once
#include "BlockDirectory.h"
#include "IterationStatus.h"
#include "LargeAllocation.h"
#include "MarkedBlock.h"
#include "MarkedBlockSet.h"
#include <array>
#include <wtf/Bag.h>
#include <wtf/HashSet.h>
#include <wtf/Noncopyable.h>
#include <wtf/RetainPtr.h>
#include <wtf/SentinelLinkedList.h>
#include <wtf/SinglyLinkedListWithTail.h>
#include <wtf/Vector.h>
namespace JSC {
class CompleteSubspace;
class Heap;
class HeapIterationScope;
class LLIntOffsetsExtractor;
class Subspace;
class WeakSet;
typedef uint32_t HeapVersion;
// MarkedSpace manages the block-structured portion of the heap: the set of
// MarkedBlocks, the BlockDirectories that allocate out of them, the registered
// Subspaces, the active WeakSet lists, and the LargeAllocations that are too big
// for any size class.
class MarkedSpace {
    WTF_MAKE_NONCOPYABLE(MarkedSpace);
public:
    // Size classes advance in multiples of the MarkedBlock atom size.
    static constexpr size_t sizeStep = MarkedBlock::atomSize;

    // Requests at or below this many bytes are rounded up to an exact multiple
    // of sizeStep rather than snapped to a size-class table entry (see optimalSizeFor).
    static constexpr size_t preciseCutoff = 80;

    static constexpr size_t blockPayload = MarkedBlock::payloadSize;

    // Largest request served from MarkedBlocks: half a block's payload, rounded
    // down to a sizeStep boundary. Anything bigger bypasses the size classes.
    static constexpr size_t largeCutoff = (blockPayload / 2) & ~(sizeStep - 1);

    static constexpr size_t numSizeClasses = largeCutoff / sizeStep + 1;

    // Version 0 is reserved as the "null" version; nextVersion() never returns it.
    static constexpr HeapVersion nullVersion = 0; static constexpr HeapVersion initialVersion = 2;

    // Advances a heap version, wrapping past nullVersion back to initialVersion.
    static HeapVersion nextVersion(HeapVersion version)
    {
        version++;
        if (version == nullVersion)
            version = initialVersion;
        return version;
    }

    // Maps a byte size to its size-class index, rounding up to the next sizeStep.
    static size_t sizeClassToIndex(size_t size)
    {
        return (size + sizeStep - 1) / sizeStep;
    }

    // Inverse of sizeClassToIndex for sizes that are exact size-class multiples.
    static size_t indexToSizeClass(size_t index)
    {
        size_t result = index * sizeStep;
        ASSERT(sizeClassToIndex(result) == index);
        return result;
    }

    MarkedSpace(Heap*);
    ~MarkedSpace();

    Heap* heap() const { return m_heap; }

    void lastChanceToFinalize(); void freeMemory();

    // Rounds a byte count up to the allocation size the space would actually use.
    static size_t optimalSizeFor(size_t);

    void prepareForAllocation();

    void visitWeakSets(SlotVisitor&);
    void reapWeakSets();

    MarkedBlockSet& blocks() { return m_blocks; }

    // Iteration bracket: callers use HeapIterationScope, which flips m_isIterating.
    void willStartIterating();
    bool isIterating() const { return m_isIterating; }
    void didFinishIterating();

    void stopAllocating();
    void stopAllocatingForGood();
    void resumeAllocating();

    void prepareForMarking();
    void prepareForConservativeScan();

    typedef HashSet<MarkedBlock*>::iterator BlockIterator;

    template<typename Functor> void forEachLiveCell(HeapIterationScope&, const Functor&);
    template<typename Functor> void forEachDeadCell(HeapIterationScope&, const Functor&);
    template<typename Functor> void forEachBlock(const Functor&);

    void shrink();
    void freeBlock(MarkedBlock::Handle*);
    void freeOrShrinkBlock(MarkedBlock::Handle*);

    void didAddBlock(MarkedBlock::Handle*);
    void didConsumeFreeList(MarkedBlock::Handle*);
    void didAllocateInBlock(MarkedBlock::Handle*);

    void beginMarking();
    void endMarking();
    void snapshotUnswept();
    void clearNewlyAllocated();
    void sweep();
    void sweepLargeAllocations();
    void assertNoUnswept();
    size_t objectCount();
    size_t size();
    size_t capacity();

    bool isPagedOut(MonotonicTime deadline);

    HeapVersion markingVersion() const { return m_markingVersion; }
    HeapVersion newlyAllocatedVersion() const { return m_newlyAllocatedVersion; }

    const Vector<LargeAllocation*>& largeAllocations() const { return m_largeAllocations; }
    unsigned largeAllocationsNurseryOffset() const { return m_largeAllocationsNurseryOffset; }
    unsigned largeAllocationsOffsetForThisCollection() const { return m_largeAllocationsOffsetForThisCollection; }

    // Bounds of the large-allocation subrange considered by the current collection.
    LargeAllocation** largeAllocationsForThisCollectionBegin() const { return m_largeAllocationsForThisCollectionBegin; }
    LargeAllocation** largeAllocationsForThisCollectionEnd() const { return m_largeAllocationsForThisCollectionEnd; }
    unsigned largeAllocationsForThisCollectionSize() const { return m_largeAllocationsForThisCollectionSize; }

    BlockDirectory* firstDirectory() const { return m_directories.first(); }

    Lock& directoryLock() { return m_directoryLock; }
    void addBlockDirectory(const AbstractLocker&, BlockDirectory*);

    bool isMarking() const { return m_isMarking; }

    WeakSet* activeWeakSetsBegin() { return m_activeWeakSets.begin(); }
    WeakSet* activeWeakSetsEnd() { return m_activeWeakSets.end(); }
    WeakSet* newActiveWeakSetsBegin() { return m_newActiveWeakSets.begin(); }
    WeakSet* newActiveWeakSetsEnd() { return m_newActiveWeakSets.end(); }

    void dumpBits(PrintStream& = WTF::dataFile());

    // Lookup table indexed by sizeClassToIndex(size), yielding the optimal
    // allocation size for that step (consumed by optimalSizeFor).
    JS_EXPORT_PRIVATE static std::array<size_t, numSizeClasses> s_sizeClassForSizeStep;

private:
    friend class CompleteSubspace;
    friend class LLIntOffsetsExtractor;
    friend class JIT;
    friend class WeakSet;
    friend class Subspace;

    template<typename Functor> void forEachLiveCell(const Functor&);

    static void initializeSizeClassForStepSize();

    void initializeSubspace(Subspace&);

    template<typename Functor> inline void forEachDirectory(const Functor&);

    void addActiveWeakSet(WeakSet*);

    Vector<Subspace*> m_subspaces;

    Vector<LargeAllocation*> m_largeAllocations;
    unsigned m_largeAllocationsNurseryOffset { 0 };
    unsigned m_largeAllocationsOffsetForThisCollection { 0 };
    unsigned m_largeAllocationsNurseryOffsetForSweep { 0 };
    LargeAllocation** m_largeAllocationsForThisCollectionBegin { nullptr };
    LargeAllocation** m_largeAllocationsForThisCollectionEnd { nullptr };
    unsigned m_largeAllocationsForThisCollectionSize { 0 };

    Heap* m_heap;
    HeapVersion m_markingVersion { initialVersion };
    HeapVersion m_newlyAllocatedVersion { initialVersion };
    size_t m_capacity;
    bool m_isIterating;
    bool m_isMarking { false };
    MarkedBlockSet m_blocks;

    SentinelLinkedList<WeakSet, BasicRawSentinelNode<WeakSet>> m_activeWeakSets;
    SentinelLinkedList<WeakSet, BasicRawSentinelNode<WeakSet>> m_newActiveWeakSets;

    Lock m_directoryLock;
    SinglyLinkedListWithTail<BlockDirectory> m_directories;

    friend class HeapVerifier;
};
template <typename Functor> inline void MarkedSpace::forEachBlock(const Functor& functor)
{
    // Visit every MarkedBlock by walking each directory and letting it iterate
    // its own blocks; the walk never terminates early.
    auto visitOneDirectory = [&] (BlockDirectory& dir) -> IterationStatus {
        dir.forEachBlock(functor);
        return IterationStatus::Continue;
    };
    forEachDirectory(visitOneDirectory);
}
template <typename Functor>
void MarkedSpace::forEachDirectory(const Functor& functor)
{
for (BlockDirectory* directory = m_directories.first(); directory; directory = directory->nextDirectory()) {
if (functor(*directory) == IterationStatus::Done)
return;
}
}
ALWAYS_INLINE size_t MarkedSpace::optimalSizeFor(size_t bytes)
{
    // Returns the allocation size the space would actually use for a request
    // of `bytes`. Requests must be non-zero.
    ASSERT(bytes);
    if (bytes > largeCutoff)
        return bytes; // Too big for any size class: use the exact size.
    if (bytes > preciseCutoff)
        return s_sizeClassForSizeStep[sizeClassToIndex(bytes)]; // Snap to a size class.
    return WTF::roundUpToMultipleOf<sizeStep>(bytes); // Small sizes round to the next atom boundary.
}
}