#include "config.h"
#include "ProbeStack.h"
#include <memory>
#include <wtf/StdLibExtras.h>
#if ENABLE(MASM_PROBE)
namespace JSC {
namespace Probe {
// Sentinel "no watermark" value: the highest representable address, so any real
// dirty-chunk address compares lower. Returned when no chunk/page is dirty.
static void* const maxLowWatermark = reinterpret_cast<void*>(std::numeric_limits<uintptr_t>::max());
#if ASAN_ENABLED
// Under ASan we copy word-by-word with instrumentation suppressed: the probe
// copies raw machine-stack memory, and an instrumented memcpy over redzones
// would trigger false positives.
SUPPRESS_ASAN
static void copyStackPage(void* dst, void* src, size_t size)
{
// Both endpoints must be word-aligned since we copy one uintptr_t at a time.
ASSERT(roundUpToMultipleOf<sizeof(uintptr_t)>(dst) == dst);
ASSERT(roundUpToMultipleOf<sizeof(uintptr_t)>(src) == src);
uintptr_t* dstPointer = reinterpret_cast<uintptr_t*>(dst);
uintptr_t* srcPointer = reinterpret_cast<uintptr_t*>(src);
// NOTE(review): assumes size is a multiple of sizeof(uintptr_t) — a non-multiple
// would wrap the unsigned counter and overrun. The callers in this file pass
// s_pageSize or s_chunkSize-derived sizes; confirm those are word multiples.
for (; size; size -= sizeof(uintptr_t))
*dstPointer++ = *srcPointer++;
}
#else
// Without ASan a plain memcpy is safe and fastest.
#define copyStackPage(dst, src, size) std::memcpy(dst, src, size)
#endif
// Snapshots one machine-stack page: records the page's logical (real stack)
// address and the delta from that address to our local buffer, then copies the
// page's current contents into m_buffer.
Page::Page(void* baseAddress)
: m_baseLogicalAddress(baseAddress)
, m_physicalAddressOffset(reinterpret_cast<uint8_t*>(&m_buffer) - reinterpret_cast<uint8_t*>(baseAddress))
{
copyStackPage(&m_buffer, baseAddress, s_pageSize);
}
// Writes dirty chunks of the buffered page back to the real stack.
// m_dirtyBits holds one bit per s_chunkSize chunk (bit 0 == offset 0).
// Runs of consecutive dirty chunks are coalesced into one copyStackPage call.
void Page::flushWrites()
{
uint64_t dirtyBits = m_dirtyBits;
size_t offset = 0;
while (dirtyBits) {
if (dirtyBits & 1) {
// Start of a dirty run: extend it over all consecutive set bits.
size_t startOffset = offset;
do {
dirtyBits = dirtyBits >> 1;
offset += s_chunkSize;
} while (dirtyBits & 1);
size_t size = offset - startOffset;
uint8_t* src = reinterpret_cast<uint8_t*>(&m_buffer) + startOffset;
uint8_t* dst = reinterpret_cast<uint8_t*>(m_baseLogicalAddress) + startOffset;
copyStackPage(dst, src, size);
}
// Skip past the clean bit that ended the run (or the current clean chunk).
dirtyBits = dirtyBits >> 1;
offset += s_chunkSize;
}
m_dirtyBits = 0;
}
void* Page::lowWatermarkFromVisitingDirtyChunks()
{
uint64_t dirtyBits = m_dirtyBits;
size_t offset = 0;
while (dirtyBits) {
if (dirtyBits & 1)
return reinterpret_cast<uint8_t*>(m_baseLogicalAddress) + offset;
dirtyBits = dirtyBits >> 1;
offset += s_chunkSize;
}
return maxLowWatermark;
}
// Move constructor: steals the other stack's bounds and buffered pages.
Stack::Stack(Stack&& other)
: m_stackBounds(WTFMove(other.m_stackBounds))
, m_pages(WTFMove(other.m_pages))
{
m_savedStackPointer = other.m_savedStackPointer;
#if !ASSERT_DISABLED
// Mark the moved-from stack invalid so debug ASSERTs can catch further use.
other.m_isValid = false;
#endif
}
// True if any buffered page has dirty chunks still pending a flush to the
// real machine stack.
bool Stack::hasWritesToFlush()
{
    for (auto& entry : m_pages) {
        if (entry.value->hasWritesToFlush())
            return true;
    }
    return false;
}
// Flushes every buffered page's pending writes back to the machine stack.
void Stack::flushWrites()
{
    for (auto& entry : m_pages)
        entry.value->flushWritesIfNeeded();
}
// Returns the buffered Page covering |address|, creating and caching one on
// first access. Also refreshes the one-entry last-accessed-page cache.
// The address must lie within this stack's bounds.
Page* Stack::ensurePageFor(void* address)
{
    RELEASE_ASSERT(m_stackBounds.contains(address));
    void* baseAddress = Page::baseAddressFor(address);
    auto cached = m_pages.find(baseAddress);
    if (LIKELY(cached != m_pages.end()))
        m_lastAccessedPage = cached->value.get();
    else {
        // First touch of this page: snapshot it and remember the copy.
        auto addResult = m_pages.add(baseAddress, std::make_unique<Page>(baseAddress));
        m_lastAccessedPage = addResult.iterator->value.get();
    }
    m_lastAccessedPageBaseAddress = baseAddress;
    return m_lastAccessedPage;
}
// Computes the lowest dirty address across all buffered pages, or
// maxLowWatermark when nothing is dirty. A page whose base address is already
// above the running minimum cannot lower it, so it is skipped without
// scanning its chunks.
void* Stack::lowWatermarkFromVisitingDirtyPages()
{
    void* low = maxLowWatermark;
    for (auto& entry : m_pages) {
        Page& page = *entry.value;
        if (page.hasWritesToFlush() && !(low < page.baseAddress()))
            low = std::min(low, page.lowWatermarkFromVisitingDirtyChunks());
    }
    return low;
}
} }
#endif // ENABLE(MASM_PROBE)