#pragma once
#ifndef __AUTO_THREAD__
#define __AUTO_THREAD__
#include "AutoDefs.h"
#include "AutoPointerHash.h"
#include "AutoLock.h"
#include "AutoFreeList.h"
namespace Auto {
class MemoryScanner;
class Zone;
// Hash set of thread-local block pointers, built on AutoPointerHash.
// Each entry carries two per-entry flag bits, kept by the base class in
// the low bits of the stored pointer value: "scanned" and "marked".
class LocalBlocksHash : public AutoPointerHash {
public:
    enum {
        FlagScanned = 0x1,
        FlagMarked = 0x2,
        LocalAllocationsLimit = 2000,
    };

    LocalBlocksHash(int initialCapacity) : AutoPointerHash(initialCapacity) {}

    // --- "scanned" flag accessors ---
    inline void setScanned(uint32_t index) { setFlag(index, FlagScanned); }
    inline void setScanned(void *p) {
        int32_t slot = slotIndex(p);
        if (slot != -1) setScanned(slot);
    }
    inline bool wasScanned(uint32_t index) { return flagSet(index, FlagScanned); }

    // --- "marked" flag accessors ---
    inline void setMarked(uint32_t index) { setFlag(index, FlagMarked); }
    inline void setMarked(void *p) {
        int32_t slot = slotIndex(p);
        if (slot != -1) setMarked(slot);
    }
    inline bool wasMarked(uint32_t index) { return flagSet(index, FlagMarked); }

    // Returns the previous marked state; sets the flag if it was clear.
    inline bool testAndSetMarked(uint32_t index) {
        if (wasMarked(index)) return true;
        setMarked(index);
        return false;
    }

    // Pointer stored at index if it is marked, otherwise NULL.
    inline void *markedPointerAtIndex(uint32_t index) {
        vm_address_t value = _pointers[index];
        if (value & FlagMarked)
            return (void *)(value & ~FlagsMask);
        return NULL;
    }

    // Pointer stored at index if it is unmarked and not a removed-entry
    // tombstone, otherwise NULL.
    inline void *unmarkedPointerAtIndex(uint32_t index) {
        vm_address_t value = _pointers[index];
        if (value & FlagMarked) return NULL;
        if (value == (vm_address_t)RemovedEntry) return NULL;
        return (void *)(value & ~FlagsMask);
    }

    // Pointer stored at index if it is marked but not yet scanned,
    // otherwise NULL.
    inline void *markedUnscannedPointerAtIndex(uint32_t index) {
        vm_address_t value = _pointers[index];
        if ((value & (FlagMarked | FlagScanned)) == FlagMarked)
            return (void *)(value & ~FlagsMask);
        return NULL;
    }

    // Compacts the table, clearing both flag bits on every entry.
    inline void clearFlagsCompact() { compact(FlagScanned | FlagMarked); }

    // True once the entry count reaches the environment's size limit.
    inline bool isFull() { return count() >= Environment::local_allocations_size_limit; }
};
// Small buffer of pointers with lazily-allocated overflow segments.
// push()/pop()/reset() are defined out of line; the overflow chain is
// owned by this object and released in the destructor.
class SimplePointerBuffer {
    enum {
        PointerCount = 32           // pointers held per segment
    };
    int16_t _cursor;                // consumption index used by pop()
    int16_t _count;                 // live entries in this segment
    SimplePointerBuffer *_overflow; // owned overflow segment (may be NULL)
    void *_pointers[PointerCount];

    // Non-copyable: _overflow is an owning raw pointer, so the
    // compiler-generated copy operations would double-delete the
    // overflow chain. Declared private and unimplemented (pre-C++11
    // idiom, consistent with the rest of this header).
    SimplePointerBuffer(const SimplePointerBuffer &);
    SimplePointerBuffer &operator=(const SimplePointerBuffer &);

public:
    SimplePointerBuffer() : _cursor(0), _count(0), _overflow(NULL) {}
    ~SimplePointerBuffer() { if (_overflow) delete _overflow; }

    void reset();        // empties the buffer (out of line)
    void push(void *p);  // appends a pointer, growing overflow as needed
    void *pop();         // removes and returns the next pointer

    // Entries currently held in this segment.
    int16_t count() const { return _count; }
};
#if defined(__ppc__) || defined(__ppc64__)
// Captures the PowerPC non-volatile (callee-saved) general-purpose
// registers, r13..r31, into a buffer so the collector can scan them
// for block references.
class NonVolatileRegisters {
private:
enum {
// r13 is the first callee-saved GPR; r13..r31 inclusive => 19 slots.
first_nonvolatile_register = 13, number_of_nonvolatile_registers = 32 - first_nonvolatile_register,
};
// Snapshot of r13..r31, filled by the constructor.
usword_t _registers[number_of_nonvolatile_registers];
// Stores r13..r31 into the supplied buffer.
static inline void capture_registers(register usword_t *registers) {
#if defined(__ppc__)
// 32-bit: stmw stores r13 through r31 with a single instruction.
__asm__ volatile ("stmw r13,0(%[registers])" : : [registers] "b" (registers) : "memory");
#else
// 64-bit: there is no store-multiple for doublewords, so each
// register is stored individually at consecutive 8-byte offsets.
__asm__ volatile ("std r13,0(%[registers])" : : [registers] "b" (registers) : "memory");
__asm__ volatile ("std r14,8(%[registers])" : : [registers] "b" (registers) : "memory");
__asm__ volatile ("std r15,16(%[registers])" : : [registers] "b" (registers) : "memory");
__asm__ volatile ("std r16,24(%[registers])" : : [registers] "b" (registers) : "memory");
__asm__ volatile ("std r17,32(%[registers])" : : [registers] "b" (registers) : "memory");
__asm__ volatile ("std r18,40(%[registers])" : : [registers] "b" (registers) : "memory");
__asm__ volatile ("std r19,48(%[registers])" : : [registers] "b" (registers) : "memory");
__asm__ volatile ("std r20,56(%[registers])" : : [registers] "b" (registers) : "memory");
__asm__ volatile ("std r21,64(%[registers])" : : [registers] "b" (registers) : "memory");
__asm__ volatile ("std r22,72(%[registers])" : : [registers] "b" (registers) : "memory");
__asm__ volatile ("std r23,80(%[registers])" : : [registers] "b" (registers) : "memory");
__asm__ volatile ("std r24,88(%[registers])" : : [registers] "b" (registers) : "memory");
__asm__ volatile ("std r25,96(%[registers])" : : [registers] "b" (registers) : "memory");
__asm__ volatile ("std r26,104(%[registers])" : : [registers] "b" (registers) : "memory");
__asm__ volatile ("std r27,112(%[registers])" : : [registers] "b" (registers) : "memory");
__asm__ volatile ("std r28,120(%[registers])" : : [registers] "b" (registers) : "memory");
__asm__ volatile ("std r29,128(%[registers])" : : [registers] "b" (registers) : "memory");
__asm__ volatile ("std r30,136(%[registers])" : : [registers] "b" (registers) : "memory");
__asm__ volatile ("std r31,144(%[registers])" : : [registers] "b" (registers) : "memory");
#endif
}
public:
// Captures the registers at construction time.
NonVolatileRegisters() { capture_registers(_registers); }
// Range covering the captured register values, for scanning.
inline Range buffer_range() { return Range(_registers, sizeof(_registers)); }
};
#elif defined(__i386__)
// Captures the i386 non-volatile (callee-saved) registers — ebx, ebp,
// esp, esi, edi — into a buffer so the collector can scan them for
// block references.
class NonVolatileRegisters {
private:
// Snapshot of the five callee-saved registers, filled by the constructor.
usword_t _registers[5];
// Stores ebx/ebp/esp/esi/edi at consecutive 4-byte offsets. The "a"
// constraint pins the buffer pointer in eax, which is volatile and
// therefore not one of the registers being captured.
static inline void capture_registers(register usword_t *registers) {
__asm__ volatile ("mov %%ebx, 0(%[registers]) \n"
"mov %%ebp, 4(%[registers]) \n"
"mov %%esp, 8(%[registers]) \n"
"mov %%esi, 12(%[registers]) \n"
"mov %%edi, 16(%[registers]) \n"
: : [registers] "a" (registers) : "memory");
}
public:
// Captures the registers at construction time.
NonVolatileRegisters() { capture_registers(_registers); }
// Range covering the captured register values, for scanning.
inline Range buffer_range() { return Range(_registers, sizeof(_registers)); }
};
#elif defined(__x86_64__)
// Captures the x86_64 non-volatile (callee-saved) registers — rbx, rsp,
// rbp, r12, r13, r14, r15 — into a buffer so the collector can scan
// them for block references.
class NonVolatileRegisters {
private:
// Snapshot of the seven callee-saved registers, filled by the constructor.
usword_t _registers[7];
// Stores the callee-saved registers at consecutive 8-byte offsets.
// The "a" constraint pins the buffer pointer in rax, which is volatile
// and therefore not one of the registers being captured.
static inline void capture_registers(register usword_t *registers) {
__asm__ volatile ("movq %%rbx, 0(%[registers]) \n"
"movq %%rsp, 8(%[registers]) \n"
"movq %%rbp, 16(%[registers]) \n"
"movq %%r12, 24(%[registers]) \n"
"movq %%r13, 32(%[registers]) \n"
"movq %%r14, 40(%[registers]) \n"
"movq %%r15, 48(%[registers]) \n"
: : [registers] "a" (registers) : "memory");
}
public:
// Captures the registers at construction time.
NonVolatileRegisters() { capture_registers(_registers); }
// Range covering the captured register values, for scanning.
inline Range buffer_range() { return Range(_registers, sizeof(_registers)); }
};
#else
#error Unknown Architecture
#endif
// Memory-allocation adapter used to parameterize PointerQueue: routes
// allocation through a Zone and forwards the remaining operations to
// the global Auto:: helpers.
class ThreadMemoryAllocator {
    Zone *_zone;    // zone backing allocate_memory()/deallocate_memory()

public:
    ThreadMemoryAllocator(Zone *zone) : _zone(zone) {}

    // Out-of-line: acquire and release memory from the associated zone.
    void *allocate_memory(usword_t size);
    void deallocate_memory(void *address, usword_t size);

    // Thin forwards to the namespace-level helpers.
    void uncommit_memory(void *address, usword_t size) { Auto::uncommit_memory(address, size); }
    void copy_memory(void *dest, void *source, usword_t size) { Auto::copy_memory(dest, source, size); }
};
// Queue of pointers awaiting enlivening, paired with the guarded
// boolean that records whether enlivening is currently required.
class EnliveningQueue : public PointerQueue<ThreadMemoryAllocator> {
    LockedBoolean _needs_enlivening;    // flag + lock consulted by mutator threads

public:
    EnliveningQueue(ThreadMemoryAllocator allocator)
        : PointerQueue<ThreadMemoryAllocator>(allocator), _needs_enlivening() {}

    // Access to the enlivening flag/lock pair.
    LockedBoolean &needs_enlivening() { return _needs_enlivening; }
};
union ThreadState;
// Per-thread garbage-collection state. A Thread is bound to a pthread
// via bind()/unbind(), linked into a zone-owned singly-linked list, and
// holds the thread's allocation cache, its set of thread-local blocks,
// and the enlivening queue used while a collection is in progress.
class Thread : public AuxAllocated {
private:
// _next: link in the zone's thread list. _zone: owning zone.
// _pthread/_thread: pthread and mach port identities of the bound thread.
// _stack_base: upper bound used for stack scanning and is_stack_address().
// _scanning: flag/lock pair taken while this thread is being scanned.
// _suspended: presumably a suspend() nesting count — see suspend()/resume()
// definitions elsewhere. _stack_scan_peak: per-thread stack mark used by
// clear_stack() (semantics defined out of line).
// _allocation_cache: one free list per quantum size class.
Thread *_next; Zone *_zone; pthread_t _pthread; mach_port_t _thread; void *_stack_base; LockedBoolean _scanning; uint32_t _suspended; void *_stack_scan_peak; FreeList _allocation_cache[maximum_quanta + 1];
// Thread-local block set, and the count threshold consulted by callers
// via localAllocationThreshold().
LocalBlocksHash _localAllocations; uint32_t _localAllocationThreshold;
// Queue of blocks to enliven during collection; TSD destructor pass count.
EnliveningQueue _enlivening_queue; int32_t _destructor_count;
// True while this thread is executing collector code.
bool _in_collector;
// Captures this thread's register state for scanning (out of line).
void get_register_state(ThreadState &state, unsigned &user_count);
// Removes a block from the thread-local set.
inline void remove_local(void *block) {
_localAllocations.remove(block);
}
public:
Thread(Zone *zone);
~Thread();
// Associate/dissociate this Thread with the calling pthread (out of line).
void bind();
bool unbind();
// Acquire/release the _scanning lock so the stack can be scanned safely.
bool lockForScanning();
void unlockForScanning();
// --- simple accessors ---
inline Thread *next() { return _next; }
inline Zone *zone() { return _zone; }
inline pthread_t pthread() { return _pthread; }
inline mach_port_t thread() { return _thread; }
inline void set_next(Thread *next) { _next = next; }
// Free list serving blocks of the given quantum index.
inline FreeList &allocation_cache(usword_t index) { return _allocation_cache[index]; }
inline void *stack_base() { return _stack_base; }
inline LocalBlocksHash &locals() { return _localAllocations; }
inline uint32_t localAllocationThreshold() const { return _localAllocationThreshold; }
inline void setLocalAllocationThreshold(uint32_t threshold) { _localAllocationThreshold = threshold; }
// A thread is "bound" while a pthread is registered with it.
inline bool is_bound() { return _pthread != NULL; }
// Bumps and returns the thread-specific-data destructor pass count.
inline int32_t increment_tsd_count() { return ++_destructor_count; }
inline void set_in_collector(bool value) { _in_collector = value; }
inline bool in_collector() const { return _in_collector; }
EnliveningQueue &enlivening_queue() { return _enlivening_queue; }
LockedBoolean &needs_enlivening() { return _enlivening_queue.needs_enlivening(); }
// Clears unused stack memory (out of line).
void clear_stack();
// True if address lies between the current frame and the stack base.
// NOTE(review): assumes the stack grows downward from _stack_base,
// which holds on the architectures supported above.
inline bool is_stack_address(void *address) {
Range stack(__builtin_frame_address(0), _stack_base);
return (stack.in_range(address));
}
// Write-barrier hooks (out of line): invoked when a local block may
// have escaped this thread, and on local stores/copies.
void block_escaped(Zone *zone, Subzone *subzone, void *block);
void track_local_assignment(Zone *zone, void *dst, void *value);
void track_local_memcopy(Zone *zone, const void *src, void *dst, size_t size);
// Records a new thread-local allocation, flushing the local set first
// if it has reached its size limit.
void add_local_allocation(void *block) {
if (_localAllocations.isFull())
flush_local_blocks();
_localAllocations.add(block);
}
void flush_local_blocks();
// --- stack/register scanning entry points ---
// The function-pointer overloads adapt C callbacks onto the
// block-based implementations.
void scan_current_thread(MemoryScanner &scanner);
void scan_current_thread(void (^scanner) (Thread *thread, Range &range), void *stack_bottom);
void scan_current_thread(void (*scanner) (Thread *thread, Range&, void*), void *arg, void *stack_bottom) {
scan_current_thread(^(Thread *thread, Range &range) { scanner(thread, range, arg); }, stack_bottom);
}
void scan_other_thread(MemoryScanner &scanner, bool withSuspend);
void scan_other_thread(void (^scanner) (Thread *thread, Range &range), bool withSuspend);
void scan_other_thread(void (*scanner) (Thread *thread, Range&, void*), void *arg, bool withSuspend) {
scan_other_thread(^(Thread *thread, Range &range) { scanner(thread, range, arg); }, withSuspend);
}
// Dispatches to the current-thread or other-thread scan as appropriate.
void scan_thread(MemoryScanner &scanner, bool withSuspend) {
if (is_current_thread())
scan_current_thread(scanner);
else
scan_other_thread(scanner, withSuspend);
}
// Diagnostic dump of stack, registers, and local blocks (out of line).
void dump(auto_zone_stack_dump stack_dump, auto_zone_register_dump register_dump, auto_zone_node_dump dump_local_block);
inline bool is_current_thread() const {
return pthread_self() == _pthread;
}
// Returns a block to this thread's allocation cache (out of line).
void thread_cache_add(void *block);
// Removes this thread from the singly-linked list rooted at *link by
// walking the chain of next-pointers until it finds itself.
inline void unlink(Thread **link) {
for (Thread *t = *link; t; link = &t->_next, t = *link) {
if (t == this) {
*link = t->_next;
break;
}
}
}
// Moves every unbound thread from the active list onto the inactive
// list. Each thread's scanning lock is held while it is examined, so
// the move cannot race with a concurrent scan of that thread.
static void scavenge_threads(Thread **active_link, Thread **inactive_link) {
while (Thread *thread = *active_link) {
SpinLock lock(&thread->_scanning.lock);
if (!thread->is_bound()) {
*active_link = thread->_next;
thread->_next = *inactive_link;
*inactive_link = thread;
} else {
active_link = &thread->_next;
}
}
}
// Mach-level suspend/resume of the underlying thread (out of line).
void suspend();
void resume();
// Writes a human-readable description into buf; returns buf (out of line).
char *description(char *buf, size_t bufsz);
};
};
#endif // __AUTO_THREAD__