#pragma once
#ifndef __AUTO_ZONE_CORE__
#define __AUTO_ZONE_CORE__
#include "auto_zone.h"
#include "auto_impl_utilities.h"
#include "auto_weak.h"
#include "AutoBitmap.h"
#include "AutoConfiguration.h"
#include "AutoDefs.h"
#include "AutoLarge.h"
#include "AutoLock.h"
#include "AutoAdmin.h"
#include "AutoRegion.h"
#include "AutoStatistics.h"
#include "AutoSubzone.h"
#include "AutoThread.h"
#include <algorithm>
#include <cassert>
#include <cstdio>
#include <vector>
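// When USE_DISPATCH_QUEUE is set, collections are scheduled on a dispatch
// queue via a dispatch block; otherwise a dedicated collector pthread and
// condition variable are used (see the conditional members of Zone below).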
#define USE_DISPATCH_QUEUE 1
#if USE_DISPATCH_QUEUE
#include <dispatch/dispatch.h>
#endif
namespace Auto {
class Monitor;
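//
// TrackedPageAllocator
//
// Thin wrapper over the low-level page primitives that charges allocated
// and deallocated pages to the zone's administrative statistics.
// uncommit_memory() and copy_memory() pass straight through without
// touching the statistics.
//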
class TrackedPageAllocator {
private:
Statistics &_stats;
public:
TrackedPageAllocator(Statistics &st) : _stats(st) {}
TrackedPageAllocator(const TrackedPageAllocator &allocator) : _stats(allocator._stats) {}
inline void *allocate_memory(usword_t size) {
_stats.add_admin(size);
return Auto::allocate_memory(size);
}
inline void deallocate_memory(void *address, usword_t size) {
_stats.add_admin(-(intptr_t)size);
Auto::deallocate_memory(address, size);
}
inline void uncommit_memory(void *address, usword_t size) {
Auto::uncommit_memory(address, size);
}
inline void copy_memory(void *dest, void *source, usword_t size) {
Auto::copy_memory(dest, source, size);
}
};
typedef PointerArray<TrackedPageAllocator> PointerList;
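//
// EnliveningHelper
//
// Couples a write-barrier policy class with the current thread's
// enlivening queue: the barrier is constructed from the queue's
// needs-enlivening condition, and enliven_block() queues a block so it
// survives a concurrently running scan.
//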
template <typename BarrierType>
class EnliveningHelper : public BarrierType {
Thread &_thread;
public:
EnliveningHelper(Thread &thread) : BarrierType(thread.enlivening_queue().needs_enlivening()), _thread(thread) {}
void enliven_block(void *block) { _thread.enlivening_queue().add(block); }
};
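//
// ScanStack
//
// Fixed-size stack of pending scan entries. Once _cursor reaches _end the
// stack is in an overflow state: further pushes are silently dropped and
// top()/pop() return NULL, so the final slot effectively acts as an
// overflow sentinel. _highwater records the deepest extent reached, which
// max() reports for sizing the stack.
//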
class ScanStack {
private:
void **_address;
void **_end;
void **_cursor;
void **_highwater;
public:
ScanStack()
: _address(NULL)
, _end(NULL)
, _cursor(NULL)
, _highwater(NULL)
{}
inline void set_range(Range range) {
set_range(range.address(), range.end());
}
inline void set_range(void *address, void *end) {
_address = (void **)address;
_end = (void **)end;
_cursor = (void **)address;
_highwater = (void **)address;
}
inline void set_range(void *address, usword_t size) {
set_range(address, displace(address, size));
}
void reset() {
_cursor = _address;
_highwater = _address;
}
inline bool is_allocated() const {
return _address != NULL;
}
inline bool is_empty() const {
return _cursor == _address;
}
inline bool is_overflow() const {
return _cursor == _end;
}
inline void push(void *block) {
if (!is_overflow()) {
*_cursor++ = block;
if (_highwater < _cursor) _highwater = _cursor;
}
}
inline void *top() {
if (!is_empty() && !is_overflow()) return _cursor[-1];
return NULL;
}
inline void *pop() {
if (!is_empty() && !is_overflow()) return *--_cursor;
return NULL;
}
inline unsigned long max() {
return _highwater - _address;
}
};
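// A minimal usage sketch (the caller owns the backing buffer; the names
// below are illustrative, not part of the zone):
//
//   void *buffer[512];
//   ScanStack stack;
//   stack.set_range(buffer, sizeof(buffer));
//   stack.push(block);                        // silently dropped once full
//   while (void *top = stack.pop()) {
//       // ... scan the block, pushing any children ...
//   }
//   if (stack.is_overflow()) { /* fall back to a conservative rescan */ }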
typedef std::vector<Range, AuxAllocator<Range> > RangeVector;
class ObjectAssociationHashMap : public __gnu_cxx::hash_map<void *, void *, AuxPointerHash, AuxPointerEqual, AuxAllocator<void *> >, public AuxAllocated {};
typedef __gnu_cxx::hash_map<void *, ObjectAssociationHashMap*, AuxPointerHash, AuxPointerEqual, AuxAllocator<void *> > AssociationsHashMap;
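//
// State
//
// Collector phases, listed in the order a collection passes through them.
//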
enum State {
idle, scanning, enlivening, finalizing, reclaiming
};
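//
// Zone
//
// The collector's central data structure. A zone embeds a malloc_zone_t so
// it can be registered with the malloc system, and owns the small and
// medium block admins, the large-block list, subzone regions, registered
// roots, weak and associative references, registered threads, and the scan
// stack used while marking.
//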
class Zone {
friend class MemoryScanner;
#define INVALID_THREAD_KEY_VALUE ((Thread *)-1)
public:
malloc_zone_t basic_zone;
bool multithreaded;
volatile int32_t collector_disable_count;
volatile int32_t collector_collecting;
uint32_t collection_count;
auto_collection_control_t control;
auto_lock_t stats_lock;
auto_statistics_t stats;
unsigned num_weak_refs;
unsigned max_weak_refs;
struct weak_entry_t *weak_refs_table;
spin_lock_t weak_refs_table_lock;
#if USE_DISPATCH_QUEUE
dispatch_queue_t collection_queue;
dispatch_block_t collection_block;
#else
pthread_t collection_thread;
pthread_cond_t collection_requested;
#endif
pthread_mutex_t collection_mutex;
volatile uint32_t collection_requested_mode;
pthread_cond_t collection_status;
volatile uint32_t collection_status_state;
private:
static volatile int32_t _zone_count;
static Zone *_first_zone;
Thread *_registered_threads;
pthread_key_t _registered_threads_key;
pthread_mutex_t _registered_threads_mutex;
bool _enlivening_enabled;
bool _enlivening_complete;
pthread_mutex_t _mark_bits_mutex;
Bitmap _in_subzone;                     // one bit per subzone quantum: subzone memory
Bitmap _in_large;                       // one bit per large quantum: large-block memory
Large *_large_list;                     // list of large blocks
spin_lock_t _large_lock;
PtrHashSet _roots;                      // registered root locations
spin_lock_t _roots_lock;
RangeVector _datasegments;              // registered data segments, kept sorted by address
spin_lock_t _datasegments_lock;
PtrHashSet _zombies;                    // zombie blocks
spin_lock_t _zombies_lock;
Region *_region_list;                   // list of subzone regions
spin_lock_t _region_lock;
PtrIntHashMap _retains;                 // per-block retain counts
spin_lock_t _retains_lock;
bool _repair_write_barrier;             // whether write barriers need repair during scanning
ScanStack _scan_stack;                  // blocks, ranges, and write barriers pending scanning
Range _coverage;                        // span of addresses the zone may cover
spin_lock_t _coverage_lock;
TrackedPageAllocator _page_allocator;
Statistics _stats;
int32_t _allocation_threshold;          // allocation budget remaining before the next collection
PointerList _garbage_list;              // garbage blocks pending reclamation
AssociationsHashMap _associations;      // associative references: object -> (key -> value)
spin_lock_t _associations_lock;
bool _scanning_associations;            // true while associations are being scanned
volatile enum State _state;             // current collector phase
#if UseArena
void *_arena;
void *_large_start;
Bitmap _large_bits;
spin_lock_t _large_bits_lock;
#endif
Admin _small_admin;
Admin _medium_admin;
void deallocate_large(void *block);
Region *allocate_region();
void *allocate_large(Thread &thread, usword_t &size, const unsigned layout, bool clear, bool refcount_is_one);
inline Large *find_large(void *block) { return Large::large(block); }
void deallocate_small_medium(void *block);
public:
#if UseArena
void *arena_allocate_region(usword_t newsize);
#endif
void *arena_allocate_large(usword_t size);
void arena_deallocate(void *, size_t size);
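// The Zone header is followed in the same allocation by the _in_subzone,
// _in_large, and (under UseArena) arena bitmaps; admin_offset() is the
// page-aligned offset at which that bitmap storage begins, and
// bytes_needed() sizes the whole allocation.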
static inline const usword_t admin_offset() { return align(sizeof(Zone), page_size); }
static inline const usword_t bytes_needed() {
usword_t in_subzone_size = Bitmap::bytes_needed(subzone_quantum_max);
usword_t in_large_size = Bitmap::bytes_needed(allocate_quantum_large_max);
#if UseArena
usword_t arena_size = Bitmap::bytes_needed(allocate_quantum_large_max);
#else
usword_t arena_size = 0;
#endif
return admin_offset() + in_subzone_size + in_large_size + arena_size;
}
inline void *operator new(const size_t size) {
#if DEBUG
void *allocation_address = allocate_guarded_memory(bytes_needed());
#else
void *allocation_address = allocate_memory(bytes_needed());
#endif
if (!allocation_address) error("Cannot allocate zone");
return allocation_address;
}
inline void operator delete(void *zone) {
#if DEBUG
if (zone) deallocate_guarded_memory(zone, bytes_needed());
#else
if (zone) deallocate_memory(zone, bytes_needed());
#endif
}
static void setup_shared();
static pthread_key_t allocate_thread_key();
Zone(pthread_key_t thread_registration_key);
~Zone();
static inline Zone *zone() { return _first_zone; }
inline Admin *small_admin() { return &_small_admin; }
inline Admin *medium_admin() { return &_medium_admin; }
inline Thread *threads() { return _registered_threads; }
inline pthread_mutex_t *threads_mutex() { return &_registered_threads_mutex; }
inline Region *region_list() { return _region_list; }
inline Large *large_list() { return _large_list; }
inline spin_lock_t *large_lock() { return &_large_lock; }
inline Statistics &statistics() { return _stats; }
inline Range &coverage() { return _coverage; }
inline TrackedPageAllocator &page_allocator() { return _page_allocator; }
inline PointerList &garbage_list() { return _garbage_list; }
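// Look up the Thread registered for the calling pthread, using the
// direct-TSD fast path when the pthread library provides one and falling
// back to pthread_getspecific() otherwise.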
inline Thread *current_thread_direct() {
if (_pthread_has_direct_tsd()) {
#define CASE_FOR_DIRECT_KEY(key) case key: return (Thread *)_pthread_getspecific_direct(key)
switch (_registered_threads_key) {
CASE_FOR_DIRECT_KEY(__PTK_FRAMEWORK_GC_KEY0);
CASE_FOR_DIRECT_KEY(__PTK_FRAMEWORK_GC_KEY1);
CASE_FOR_DIRECT_KEY(__PTK_FRAMEWORK_GC_KEY2);
CASE_FOR_DIRECT_KEY(__PTK_FRAMEWORK_GC_KEY3);
CASE_FOR_DIRECT_KEY(__PTK_FRAMEWORK_GC_KEY4);
CASE_FOR_DIRECT_KEY(__PTK_FRAMEWORK_GC_KEY5);
CASE_FOR_DIRECT_KEY(__PTK_FRAMEWORK_GC_KEY6);
CASE_FOR_DIRECT_KEY(__PTK_FRAMEWORK_GC_KEY7);
CASE_FOR_DIRECT_KEY(__PTK_FRAMEWORK_GC_KEY8);
CASE_FOR_DIRECT_KEY(__PTK_FRAMEWORK_GC_KEY9);
default: return NULL;
}
#undef CASE_FOR_DIRECT_KEY
} else {
return (Thread *)pthread_getspecific(_registered_threads_key);
}
}
inline Thread *current_thread() {
Thread *thread = current_thread_direct();
if (__builtin_expect(thread == INVALID_THREAD_KEY_VALUE, 0)) {
auto_fatal("Zone::current_thread(): pthread looked up after unregister. Pthreads static key destructor ordering issue?\n");
}
return thread;
}
inline Thread &registered_thread() {
Thread *thread = current_thread();
if (!thread) {
auto_error(this, "GC operation on unregistered thread. Thread registered implicitly. Break on auto_zone_thread_registration_error() to debug.\n", NULL);
auto_zone_thread_registration_error();
return register_thread();
}
return *thread;
}
static void destroy_registered_thread(void *key_value);
inline ScanStack &scan_stack() { return _scan_stack; }
inline void set_state(enum State ns) { _state = ns; }
inline bool is_state(enum State ns) { return _state == ns; }
inline spin_lock_t *associations_lock() { return &_associations_lock; }
inline AssociationsHashMap &associations() { return _associations; }
#if UseArena
inline void * arena() { return _arena; }
#else
inline void * arena() { return (void *)0; }
#endif
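// Allocation-threshold accounting: the counter is reset to
// control.collection_threshold and decremented as memory is allocated;
// once it reaches zero, a collection is due.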
inline bool threshold_reached() const { return _allocation_threshold <= 0; }
inline void reset_threshold() { _allocation_threshold = control.collection_threshold; }
inline void adjust_threshold_counter(usword_t n) { _allocation_threshold -= n; }
static inline const usword_t subzone_index(void *address) { return (((usword_t)address & mask(arena_size_log2)) >> subzone_quantum_log2); }
static inline const usword_t subzone_count(const size_t size) { return partition2(size, subzone_quantum_log2); }
inline void activate_subzone(Subzone *subzone) { _in_subzone.set_bit_atomic(subzone_index(subzone)); }
inline bool address_in_arena(void *address) const {
#if UseArena
return ((usword_t)address & ~mask(arena_size_log2)) == (usword_t)_arena;
#else
return true;
#endif
}
inline const bool in_subzone_memory(void *address) const { return address_in_arena(address) && (bool)_in_subzone.bit(subzone_index(address)); }
inline const bool in_large_memory(void *address) const { return address_in_arena(address) && (bool)_in_large.bit(Large::quantum_index(address)); }
inline const bool in_zone_memory(void *address) const { return in_subzone_memory(address) || in_large_memory(address); }
// Round a requested size up to the allocation granularity of the admin
// that will service it: small-quantum granularity below the medium
// threshold, medium-quantum granularity above it.
static inline const usword_t good_block_size(usword_t size) {
if (size < allocate_quantum_medium) return align2(size, allocate_quantum_small_log2);
return align2(size, allocate_quantum_medium_log2);
}
inline bool is_block(void *address) {
return _coverage.in_range(address) && block_is_start(address);
}
void *block_allocate(Thread &thread, const size_t size, const unsigned layout, const bool clear, bool refcount_is_one);
void block_deallocate(void *block);
inline bool block_is_start(void *address) {
if (in_subzone_memory(address)) {
return Subzone::subzone(address)->block_is_start(address);
} else if (in_large_memory(address)) {
return Large::is_start(address);
}
return false;
}
void *block_start_large(void *address);
void *block_start(void *address);
usword_t block_size(void *address);
int block_layout(void *address);
void block_set_layout(void *address, int layout);
int get_refcount_small_medium(Subzone *subzone, void *address);
private:
int inc_refcount_small_medium(Subzone *subzone, void *block);
int dec_refcount_small_medium(Subzone *subzone, void *block);
inline void close_locks() {
spin_lock(_small_admin.lock());
spin_lock(_medium_admin.lock());
}
inline void open_locks() {
spin_unlock(_medium_admin.lock());
spin_unlock(_small_admin.lock());
}
public:
bool is_locked();
bool add_subzone(Admin *admin);
int block_refcount(void *block);
int block_increment_refcount(void *block);
int block_decrement_refcount(void *block);
void block_refcount_and_layout(void *block, int *refcount, int *layout);
inline bool is_new(void *block) {
if (in_subzone_memory(block)) {
return Subzone::subzone(block)->is_new(block);
} else if (in_large_memory(block) && Large::is_start(block)) {
return Large::is_new(block);
}
return false;
}
inline bool is_local(void *block) {
if (in_subzone_memory(block)) {
return Subzone::subzone(block)->is_thread_local(block);
}
return false;
}
inline bool block_is_garbage(void *block) {
if (in_subzone_memory(block)) {
Subzone *subzone = Subzone::subzone(block);
return subzone->block_is_start(block) && subzone->is_garbage(block);
} else if (in_large_memory(block) && Large::is_start(block)) {
return Large::large(block)->is_garbage();
}
return false;
}
inline bool block_is_marked(void *block) {
if (in_subzone_memory(block)) {
Subzone *subzone = Subzone::subzone(block);
return subzone->block_is_start(block) && subzone->is_marked(block);
} else if (in_large_memory(block) && Large::is_start(block)) {
return Large::large(block)->is_marked();
}
return false;
}
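// An association is scanned only while its owner is marked; owners outside
// the zone's heap (such as global objects) are always treated as live.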
inline bool associations_should_be_marked(void *address) {
if (in_subzone_memory(address)) {
Subzone *subzone = Subzone::subzone(address);
return subzone->block_is_start(address) && subzone->is_marked(address);
} else if (in_large_memory(address) && Large::is_start(address)) {
return Large::large(address)->is_marked();
}
return true;
}
void set_associative_ref(void *block, void *key, void *value);
void *get_associative_ref(void *block, void *key);
void erase_associations_internal(void *block);
void erase_associations(void *block);
void erase_associations_in_range(const Range &r);
void scan_associations(MemoryScanner &scanner);
void pend_associations(void *block, MemoryScanner &scanner);
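// Store a value into a root location: the value is escaped from
// thread-local status, enlivened if a collection's enlivening window is
// open, and the root address is recorded before the assignment is made.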
inline void add_root(void *root, void *value) {
Thread &thread = registered_thread();
thread.block_escaped(this, NULL, value);
EnliveningHelper<UnconditionalBarrier> barrier(thread);
SpinLock lock(&_roots_lock);
if (_roots.find(root) == _roots.end()) {
_roots.insert(root);
}
if (barrier && !block_is_marked(value)) barrier.enliven_block(value);
*(void **)root = value;
}
inline void add_root_no_barrier(void *root) {
SpinLock lock(&_roots_lock);
if (_roots.find(root) == _roots.end()) {
_roots.insert(root);
}
}
inline void copy_roots(PointerList &list) {
SpinLock lock(&_roots_lock);
usword_t count = _roots.size();
list.clear_count();
list.grow(count);
list.set_count(count);
std::copy(_roots.begin(), _roots.end(), (void**)list.buffer());
}
inline void copy_roots(PtrVector &list) {
SpinLock lock(&_roots_lock);
usword_t count = _roots.size();
list.resize(count);
std::copy(_roots.begin(), _roots.end(), list.begin());
}
inline void remove_root(void *root) {
SpinLock lock(&_roots_lock);
PtrHashSet::iterator iter = _roots.find(root);
if (iter != _roots.end()) {
_roots.erase(iter);
}
}
inline bool is_root(void *address) {
SpinLock lock(&_roots_lock);
PtrHashSet::iterator iter = _roots.find(address);
return (iter != _roots.end());
}
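// Strict weak ordering for the non-overlapping ranges in _datasegments,
// keeping the vector sorted so lower_bound()/binary_search() apply.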
struct RangeLess {
bool operator()(const Range &r1, const Range &r2) const {
return (r1.address() < r2.address()) && (r1.end() <= r2.address());
}
};
inline void add_datasegment(const Range &r) {
SpinLock lock(&_datasegments_lock);
RangeVector::iterator i = std::lower_bound(_datasegments.begin(), _datasegments.end(), r, RangeLess());
_datasegments.insert(i, r);
}
struct RangeExcludes {
Range _range;
RangeExcludes(const Range &r) : _range(r) {}
bool operator()(void *address) { return !_range.in_range(address); }
};
struct RootRemover {
PtrHashSet &_roots;
RootRemover(PtrHashSet &roots) : _roots(roots) {}
void operator()(void *address) {
PtrHashSet::iterator iter = _roots.find(address);
if (iter != _roots.end()) _roots.erase(iter);
}
};
inline void remove_datasegment(const Range &r) {
{
SpinLock lock(&_datasegments_lock);
RangeVector::iterator i = std::lower_bound(_datasegments.begin(), _datasegments.end(), r, RangeLess());
if (i != _datasegments.end()) _datasegments.erase(i);
}
{
SpinLock lock(&_roots_lock);
PtrVector rootsToRemove;
std::remove_copy_if(_roots.begin(), _roots.end(), std::back_inserter(rootsToRemove), RangeExcludes(r));
std::for_each(rootsToRemove.begin(), rootsToRemove.end(), RootRemover(_roots));
}
erase_associations_in_range(r);
weak_unregister_data_segment(this, r.address(), r.size());
}
inline void add_datasegment(void *address, size_t size) { add_datasegment(Range(address, size)); }
inline void remove_datasegment(void *address, size_t size) { remove_datasegment(Range(address, size)); }
inline bool is_global_address(void *address) {
SpinLock lock(&_datasegments_lock);
return std::binary_search(_datasegments.begin(), _datasegments.end(), Range(address, sizeof(void*)), RangeLess());
}
#if DEBUG
struct RangePrinter {
void operator() (const Range &r) {
printf("{ address = %p, end = %p }\n", r.address(), r.end());
}
};
inline void print_datasegments() {
SpinLock lock(&_datasegments_lock);
std::for_each(_datasegments.begin(), _datasegments.end(), RangePrinter());
}
void test_datasegments() {
Range r1((void*)0x1000, 512), r2((void*)0xA000, 512);
add_datasegment(r1);
add_datasegment(r2);
print_datasegments();
Range r3(r1), r4(r2);
r3.adjust(r1.size()), r4.adjust(-r2.size());
add_datasegment(r3);
add_datasegment(r4);
print_datasegments();
assert(is_global_address(r1.address()));
assert(is_global_address(displace(r1.address(), 0x10)));
assert(is_global_address(displace(r1.end(), -sizeof(void*))));
assert(is_global_address(displace(r2.address(), 0xA0)));
assert(is_global_address(displace(r3.address(), 0x30)));
assert(is_global_address(displace(r4.address(), 0x40)));
remove_datasegment(r2);
print_datasegments();
assert(!is_global_address(displace(r2.address(), 0xA0)));
remove_datasegment(r1);
assert(!is_global_address(displace(r1.address(), 0x10)));
print_datasegments();
remove_datasegment(r3);
remove_datasegment(r4);
print_datasegments();
}
#endif
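// Unregister any weak references inside a block, using the client-supplied
// weak layout callback when one is present.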
inline void erase_weak(void *ptr) {
if (control.weak_layout_for_address) {
const unsigned char* weak_layout = control.weak_layout_for_address((auto_zone_t *)this, ptr);
if (weak_layout) weak_unregister_with_layout(this, (void**)ptr, weak_layout);
}
}
inline void add_zombie(void *address) {
SpinLock lock(&_zombies_lock);
if (_zombies.find(address) == _zombies.end()) {
_zombies.insert(address);
}
}
inline bool is_zombie(void *address) {
SpinLock lock(&_zombies_lock);
PtrHashSet::iterator iter = _zombies.find(address);
return (iter != _zombies.end());
}
inline void clear_zombies() {
SpinLock lock(&_zombies_lock);
_zombies.clear();
}
bool set_write_barrier(Thread &thread, void *address, void *value);
bool set_write_barrier_range(void *address, const usword_t size);
bool set_write_barrier(void *address);
void mark_write_barriers_untouched();
void clear_untouched_write_barriers();
void clear_all_write_barriers();
void reset_all_marks();
void reset_all_marks_and_pending();
void statistics(Statistics &stats);
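//
// Scan stack encoding
//
// Entries are distinguished by the low two bits of the topmost pointer: a
// plain block pointer has low bits 00; a range is pushed as its end
// address followed by its start address displaced by 1 (low bits 01); a
// write barrier is pushed displaced by 3 (low bits 11). Note that
// scan_stack_is_empty() also reports true once the stack has overflowed.
//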
void scan_stack_push_block(void *block) {
_scan_stack.push(block);
}
void scan_stack_push_range(Range &range) {
_scan_stack.push(range.end());
_scan_stack.push(displace(range.address(), 1));
}
void scan_stack_push_write_barrier(WriteBarrier *wb) {
_scan_stack.push(displace(wb, 3));
}
bool scan_stack_is_empty() { return _scan_stack.is_empty() || _scan_stack.is_overflow(); }
bool scan_stack_is_range() {
void *pointer = _scan_stack.top();
return (uintptr_t(pointer) & 0x3) == 0x1;
}
bool scan_stack_is_write_barrier() {
void *pointer = _scan_stack.top();
return (uintptr_t(pointer) & 0x3) == 0x3;
}
void *scan_stack_pop_block() {
return _scan_stack.pop();
}
Range scan_stack_pop_range() {
void *block1 = _scan_stack.pop();
void *block2 = _scan_stack.pop();
return Range(displace(block1, -1), block2);
}
WriteBarrier *scan_stack_pop_write_barrier() {
void *pointer = _scan_stack.pop();
return (WriteBarrier*) displace(pointer, -3);
}
unsigned long int scan_stack_max() { return _scan_stack.max(); }
inline void set_repair_write_barrier(bool repair) { _repair_write_barrier = repair; }
inline bool repair_write_barrier() const { return _repair_write_barrier; }
void set_needs_enlivening();
void enlivening_barrier(MemoryScanner &scanner);
void clear_needs_enlivening();
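// Bracketing hooks for a collection: collect_begin() currently has nothing
// to do; collect_end() returns the garbage list's pages to the system and
// lets both admins purge unused free space.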
inline void collect_begin() {
}
inline void collect_end() {
_garbage_list.uncommit();
_small_admin.purge_free_space();
_medium_admin.purge_free_space();
}
bool block_collector();
void unblock_collector();
void collect(bool is_partial, void *current_stack_bottom, auto_date_t *scan_end);
void scavenge_blocks();
void invalidate_garbage(const size_t garbage_count, const vm_address_t *garbage);
void handle_overretained_garbage(void *block, int rc);
size_t free_garbage(boolean_t generational, const size_t garbage_count, vm_address_t *garbage, size_t &blocks_freed, size_t &bytes_freed);
void release_pages() {
}
void recycle_threads();
Thread &register_thread();
void unregister_thread();
private:
Thread *firstScannableThread();
Thread *nextScannableThread(Thread *thread);
public:
typedef void (^thread_scanner_t) (Thread *thread);
void scan_registered_threads(thread_scanner_t scanner);
void scan_registered_threads(void (*scanner) (Thread *, void *), void *arg) { scan_registered_threads(^(Thread *thread) { scanner(thread, arg); }); }
void suspend_all_registered_threads();
void resume_all_registered_threads();
unsigned has_weak_references() { return (num_weak_refs != 0); }
const unsigned char *layout_map_for_block(void *block) {
return control.layout_for_address ? control.layout_for_address((auto_zone_t *)this, block) : NULL;
}
const unsigned char *weak_layout_map_for_block(void *block) {
return control.weak_layout_for_address ? control.weak_layout_for_address((auto_zone_t *)this, block) : NULL;
}
void print_all_blocks();
void print_block(void *block);
void print_block(void *block, const char *tag);
void malloc_statistics(malloc_statistics_t *stats);
void dump(
auto_zone_stack_dump stack_dump,
auto_zone_register_dump register_dump,
auto_zone_node_dump thread_local_node_dump,
auto_zone_root_dump root_dump,
auto_zone_node_dump global_node_dump,
auto_zone_weak_dump weak_dump_entry
);
};
} // namespace Auto
#endif // __AUTO_ZONE_CORE__