#pragma once
#ifndef __AUTO_THREAD__
#define __AUTO_THREAD__
#include "Definitions.h"
#include "PointerHash.h"
#include "Locks.h"
#include "Subzone.h"
#include "AllocationCache.h"
namespace Auto {
class Zone;
class ThreadLocalCollector;
class LocalBlocksHash : public PointerHash {
public:
enum {
FlagScanned = 0x1,
FlagMarked = 0x2,
};
LocalBlocksHash(int initialCapacity) : PointerHash(initialCapacity) {}
inline void setScanned(uint32_t index) { setFlag(index, FlagScanned); }
inline void setScanned(void *p) { int32_t i = slotIndex(p); if (i != -1) setScanned(i); }
inline bool wasScanned(uint32_t index) { return flagSet(index, FlagScanned); }
inline void setMarked(uint32_t index) { setFlag(index, FlagMarked); }
inline void setMarked(void *p) { int32_t i = slotIndex(p); if (i != -1) setMarked(i); }
inline bool wasMarked(uint32_t index) { return flagSet(index, FlagMarked); }
inline bool testAndSetMarked(uint32_t index) {
bool old = wasMarked(index);
if (!old) setMarked(index);
return old;
}
inline void *markedPointerAtIndex(uint32_t index) {
vm_address_t value = _pointers[index];
void *pointer = (void *) (value & ~FlagsMask);
return ((value & FlagMarked) ? pointer : NULL);
}
inline void *unmarkedPointerAtIndex(uint32_t index) {
vm_address_t value = _pointers[index];
void *pointer = (void *) (value & ~FlagsMask);
return ((value & FlagMarked) ? NULL : ((value == (vm_address_t)RemovedEntry) ? NULL : pointer));
}
inline void *markedUnscannedPointerAtIndex(uint32_t index) {
vm_address_t value = _pointers[index];
void *pointer = (void *) (value & ~FlagsMask);
return ((value & (FlagMarked|FlagScanned)) == FlagMarked ? pointer : NULL);
}
inline void clearFlagsRehash() { rehash(FlagScanned | FlagMarked); }
inline void clearFlagsCompact() { compact(FlagScanned | FlagMarked); }
inline bool isFull() { return count() >= local_allocations_size_limit; }
inline size_t localsSize() {
size_t size = 0;
for (uint32_t i = firstOccupiedSlot(), last = lastOccupiedSlot(); i <= last; i++) {
void *block = (*this)[i];
if (block) {
Subzone *subzone = Subzone::subzone(block);
usword_t q = subzone->quantum_index_unchecked(block);
size += subzone->size(q);
}
}
return size;
}
};
// Snapshot of the current thread's callee-saved ("non-volatile") registers.
// A live block pointer may exist only in a register, so the collector must
// treat these values as conservative roots; buffer_range() exposes the
// captured words as a Range for the stack scanner.  The register sets below
// match each architecture's calling convention (i386: ebx/ebp/esp/esi/edi;
// x86_64: rbx/rsp/rbp/r12-r15; ARM: r4-r11).
class NonVolatileRegisters {
private:
#if defined(__i386__)
// 5 callee-saved registers on i386.
usword_t _registers[5];
// Stores the registers into *registers.  The "a" constraint pins the
// buffer address in %eax; the "memory" clobber prevents the compiler from
// reordering or caching stores around the asm.
static inline void capture_registers(register usword_t *registers) {
__asm__ volatile ("mov %%ebx, 0(%[registers]) \n"
"mov %%ebp, 4(%[registers]) \n"
"mov %%esp, 8(%[registers]) \n"
"mov %%esi, 12(%[registers]) \n"
"mov %%edi, 16(%[registers]) \n"
: : [registers] "a" (registers) : "memory");
}
#elif defined(__x86_64__)
// 7 callee-saved registers on x86_64 (8-byte stride matches usword_t).
usword_t _registers[7];
static inline void capture_registers(register usword_t *registers) {
__asm__ volatile ("movq %%rbx, 0(%[registers]) \n"
"movq %%rsp, 8(%[registers]) \n"
"movq %%rbp, 16(%[registers]) \n"
"movq %%r12, 24(%[registers]) \n"
"movq %%r13, 32(%[registers]) \n"
"movq %%r14, 40(%[registers]) \n"
"movq %%r15, 48(%[registers]) \n"
: : [registers] "a" (registers) : "memory");
}
#elif defined(__arm__)
// 8 callee-saved registers on ARM; stmia stores r4-r11 in one instruction.
usword_t _registers[8];
static inline void capture_registers(register usword_t *registers) {
__asm__ volatile ("stmia %[registers], {r4-r11}"
: : [registers] "r" (registers) : "memory");
}
#else
#error Unknown Architecture
#endif
public:
// Captures the registers at construction time, so a stack-allocated
// instance pins the caller's register state into scannable memory.
NonVolatileRegisters() { capture_registers(_registers); }
// The captured register words, as a Range suitable for conservative scanning.
inline Range buffer_range() { return Range(_registers, sizeof(_registers)); }
};
union ThreadState;
// Per-thread collector state: identity (pthread/mach port), stack bounds,
// per-layout allocation caches, and the set of thread-local blocks that a
// ThreadLocalCollector can reclaim without a global collection.  Threads are
// kept on an intrusive singly-linked list owned by the Zone.
class Thread : public AuxAllocated {
private:
// _next: intrusive list link.  _zone: owning zone.  _pthread/_thread:
// pthread and mach identities (NULL _pthread == unbound/dead, see is_bound()).
// _scanning: spinlocked flag used to coordinate stack scanning.
// _suspended: nonzero while the thread is suspended (see suspended()).
// _allocation_cache[2]: free-block caches, indexed by layout (see accessor).
Thread *_next; Zone *_zone; pthread_t _pthread; mach_port_t _thread; void *_stack_base; LockedBoolean _scanning; uint32_t _suspended; void *_stack_scan_peak; ThreadLocalCollector *_tlc; AllocationCache _allocation_cache[2];
// Hash of this thread's local (unescaped) blocks, protected by _localsGuard.
LocalBlocksHash _localAllocations; sentinel_t _localsGuard;
LockedBoolean _needs_enlivening; int32_t _destructor_count;
bool _in_collector; uint32_t _tlc_watchdog_counter; LockedBoolean _in_compaction; Subzone::PendingCountAccumulator *_pending_count_accumulator;
// Scratch buffer for the thread-local collector, sized to the locals limit.
void *_tlc_buffer[local_allocations_size_limit];
void get_register_state(ThreadState &state, unsigned &user_count);
// Removes block from the locals hash under the guard.
inline void remove_local(void *block) {
Sentinel guard(_localsGuard);
_localAllocations.remove(block);
}
void flush_cache(AllocationCache &cache);
// Slow path invoked when a thread-local block is observed escaping.
template <class BlockRef> void block_escaped_internal(BlockRef block);
public:
Thread(Zone *zone);
~Thread();
// Attach/detach this object to/from the current OS thread.
void bind();
bool unbind();
// Scanning handshake; presumably guards against scanning a running thread's
// stack concurrently -- confirm against the .cpp.
bool lockForScanning();
void unlockForScanning();
inline Thread *next() { return _next; }
inline Zone *zone() { return _zone; }
inline pthread_t pthread() { return _pthread; }
inline mach_port_t thread() { return _thread; }
inline void set_next(Thread *next) { _next = next; }
// Cache selection: assumes AUTO_UNSCANNED is a single-bit layout flag so the
// index is 0 (scanned) or 1 (unscanned) -- confirm in Definitions.h.
inline AllocationCache &allocation_cache(const usword_t layout) { return _allocation_cache[layout & AUTO_UNSCANNED]; }
inline void *stack_base() { return _stack_base; }
inline LocalBlocksHash &locals() { return _localAllocations; }
inline sentinel_t &localsGuard() { return _localsGuard; }
// A thread with no pthread identity has exited (see scavenge_threads()).
inline bool is_bound() { return _pthread != NULL; }
inline int32_t increment_tsd_count() { return ++_destructor_count; }
inline void set_in_collector(bool value) { _in_collector = value; }
inline bool in_collector() const { return _in_collector; }
inline void set_thread_local_collector(ThreadLocalCollector *c) { _tlc = c; }
inline ThreadLocalCollector *thread_local_collector() { return _tlc; }
inline void **tlc_buffer() { return _tlc_buffer; }
// TLC watchdog: tickle() counts up to 4; should_trigger() fires at exactly 4;
// disable() parks the counter at 5, which tickle() never reaches, so a
// disabled watchdog stays disabled until reset() zeroes it.
inline bool tlc_watchdog_should_trigger() { return _tlc_watchdog_counter == 4; }
inline void tlc_watchdog_disable() { _tlc_watchdog_counter = 5; }
inline void tlc_watchdog_reset() { _tlc_watchdog_counter = 0; }
inline void tlc_watchdog_tickle() { if (_tlc_watchdog_counter < 4) _tlc_watchdog_counter++; }
inline void set_pending_count_accumulator(Subzone::PendingCountAccumulator *a) { _pending_count_accumulator = a; }
inline Subzone::PendingCountAccumulator *pending_count_accumulator() const { return _pending_count_accumulator; }
// Collector handshake flags; callers take the embedded lock -- see the .cpp.
LockedBoolean &needs_enlivening() { return _needs_enlivening; }
void enliven_block(void *block);
LockedBoolean &in_compaction() { return _in_compaction; }
void clear_stack();
// True if address lies between the current frame and the stack base.
// NOTE(review): assumes the stack grows downward (frame address < base).
inline bool is_stack_address(void *address) {
Range stack(__builtin_frame_address(0), _stack_base);
return (stack.in_range(address));
}
// Write-barrier fast path: only thread-local blocks need the slow path.
template <class BlockRef> inline void block_escaped(BlockRef block) {
if (block.is_thread_local()) block_escaped_internal(block);
}
// Barrier for "dst.slot = value": if value is thread-local it may stay local
// only when dst is itself a live thread-local block (dst is then flagged for
// local scanning); otherwise value has escaped and must go global.
template <class DestBlock, class ValueBlock> void track_local_assignment(DestBlock dst, ValueBlock value)
{
bool blockStayedLocal = false;
if (value.is_thread_local()) {
if (dst.is_live_thread_local()) {
dst.set_scan_local_block();
blockStayedLocal = true;
}
if (!blockStayedLocal) {
block_escaped_internal(value);
}
}
}
void track_local_memcopy(const void *src, void *dst, size_t size);
// Records a new thread-local allocation; flushes the locals hash first if it
// has reached its size limit.
void add_local_allocation(void *block) {
Sentinel guard(_localsGuard);
if (_localAllocations.isFull())
flush_local_blocks();
_localAllocations.add(block);
}
void flush_local_blocks();
void reap_all_local_blocks();
#ifdef __BLOCKS__
// Stack-scanning callback: a block when Blocks are available, otherwise an
// abstract functor passed by reference.
typedef void (^thread_scanner_t) (Thread *thread, const Range &range);
#else
class thread_scanner {
public:
virtual void operator() (Thread *thread, const Range &range) = 0;
};
typedef thread_scanner &thread_scanner_t;
#endif
void scan_current_thread(thread_scanner_t scanner, void *stack_bottom);
void scan_current_thread(void (*scanner) (Thread*, const Range&, void*), void *arg, void *stack_bottom);
void scan_other_thread(thread_scanner_t scanner, bool withSuspend);
void scan_other_thread(void (*scanner) (Thread*, const Range&, void*), void *arg, bool withSuspend);
#ifdef __BLOCKS__
void dump(auto_zone_stack_dump stack_dump, auto_zone_register_dump register_dump, auto_zone_node_dump dump_local_block);
void visit(auto_zone_visitor_t *visitor);
#endif
inline bool is_current_thread() const {
return pthread_self() == _pthread;
}
bool thread_cache_add(void *block, Subzone *subzone, usword_t q);
// Unlinks this thread from the singly-linked list rooted at *link.
inline void unlink(Thread **link) {
for (Thread *t = *link; t; link = &t->_next, t = *link) {
if (t == this) {
*link = t->_next;
break;
}
}
}
// Walks the active list, taking each thread's scanning spinlock; threads
// whose pthread has exited (unbound) are spliced off the active list and
// pushed onto the inactive list.
static void scavenge_threads(Thread **active_link, Thread **inactive_link) {
while (Thread *thread = *active_link) {
SpinLock lock(&thread->_scanning.lock);
if (!thread->is_bound()) {
*active_link = thread->_next;
thread->_next = *inactive_link;
*inactive_link = thread;
} else {
active_link = &thread->_next;
}
}
}
void suspend();
void resume();
inline bool suspended() { return _suspended != 0; }
// Formats a human-readable summary into buf; returns buf.
char *description(char *buf, size_t bufsz);
};
template <> void Thread::block_escaped<void *>(void *block);
};
#endif // __AUTO_THREAD__