#pragma once
#ifndef __AUTO_ZONE_CORE__
#define __AUTO_ZONE_CORE__
#include "auto_zone.h"
#include "auto_impl_utilities.h"
#include "AutoBitmap.h"
#include "AutoConfiguration.h"
#include "AutoDefs.h"
#include "AutoLarge.h"
#include "AutoListTypes.h"
#include "AutoLock.h"
#include "AutoHashTable.h"
#include "AutoRegion.h"
#include "AutoStatistics.h"
#include "AutoSubzone.h"
#include "AutoThread.h"
#include <algorithm>
namespace Auto {
class PointerList {
    usword_t      _count;      // entries currently in use
    usword_t      _capacity;   // entries allocated
    vm_address_t *_buffer;     // backing store, allocated in page multiples
    Statistics   &_stats;      // zone statistics; buffer bytes are accounted as admin

  public:
    PointerList(Statistics &s) : _count(0), _capacity(0), _buffer(NULL), _stats(s) {}
    ~PointerList() { if (_buffer) deallocate_memory(_buffer, _capacity * sizeof(vm_address_t)); }

    usword_t count() const { return _count; }
    void clear_count() { _count = 0; }
    void set_count(usword_t n) { _count = n; }
    vm_address_t *buffer() { return _buffer; }
    usword_t size() { return _capacity * sizeof(vm_address_t); }

    // Decommit the backing pages without freeing the reservation; _capacity
    // is unchanged, so the pages are recommitted on next use.
    void uncommit() { if (_buffer) uncommit_memory(_buffer, _capacity * sizeof(vm_address_t)); }

    // Grow the buffer by doubling (first allocation is 4 pages).
    // Aborts the process if memory cannot be obtained.
    void grow() {
        if (!_buffer) {
            _capacity = 4 * page_size / sizeof(vm_address_t);
            _buffer = (vm_address_t*) allocate_memory(page_size * 4);
            if (!_buffer) {
                // Fix: the initial allocation was previously unchecked; a NULL
                // result here would have been dereferenced by the next add().
                malloc_printf("PointerList::grow() failed.\n");
                abort();
            }
            _stats.add_admin(page_size * 4);
        } else {
            vm_size_t old_size = _capacity * sizeof(vm_address_t);
            vm_address_t *new_buffer = (vm_address_t*) allocate_memory(old_size * 2);
            if (!new_buffer) {
                malloc_printf("PointerList::grow() failed.\n");
                abort();
            }
            // After doubling, only the delta (== old_size) is new admin memory.
            _stats.add_admin(old_size);
            _capacity *= 2;
            // vm_copy(task, src, size, dst): copy-on-write the old contents over.
            vm_copy(mach_task_self(), (vm_address_t)_buffer, old_size, (vm_address_t)new_buffer);
            deallocate_memory(_buffer, old_size);
            _buffer = new_buffer;
        }
    }

    // Ensure capacity for at least <count> entries, preserving current
    // contents. Aborts the process if memory cannot be obtained.
    void grow(usword_t count) {
        if (count > _capacity) {
            usword_t old_size = _capacity * sizeof(vm_address_t);
            if (_capacity == 0L) _capacity = 4 * page_size / sizeof(vm_address_t);
            while (count > _capacity) _capacity *= 2;
            vm_address_t *new_buffer = (vm_address_t*) allocate_memory(_capacity * sizeof(vm_address_t));
            if (!new_buffer) {
                malloc_printf("PointerList::grow(count=%lu) failed.\n", count);
                abort();
            }
            _stats.add_admin(_capacity * sizeof(vm_address_t) - old_size);
            if (_buffer) {
                // new_buffer is known non-NULL here (checked above), so only
                // a non-empty list needs its contents carried over.
                if (_count) {
                    vm_copy(mach_task_self(), (vm_address_t)_buffer, old_size, (vm_address_t)new_buffer);
                }
                deallocate_memory(_buffer, old_size);
            }
            _buffer = new_buffer;
        }
    }

    // Append one address, growing as needed.
    void add(vm_address_t addr) {
        if (_count == _capacity) grow();
        _buffer[_count++] = addr;
    }
    void add(void *pointer) {
        add((vm_address_t)pointer);
    }
};
class ScanStack {
  private:
    void **_bottom;     // first slot of the stack's storage
    void **_limit;      // one-past-last slot; the cursor parks here on overflow
    void **_top;        // next free slot
    void **_deepest;    // high-water mark ever reached by _top

  public:
    // An unallocated stack: every pointer stays NULL until set_range().
    ScanStack()
        : _bottom(NULL)
        , _limit(NULL)
        , _top(NULL)
        , _deepest(NULL)
    {}

    // Attach the stack to a memory range and reset it to empty.
    inline void set_range(Range range) {
        set_range(range.address(), range.end());
    }
    inline void set_range(void *address, void *end) {
        _bottom = (void **)address;
        _limit = (void **)end;
        _top = (void **)address;
        _deepest = (void **)address;
    }
    inline void set_range(void *address, usword_t size) {
        set_range(address, displace(address, size));
    }

    // Empty the stack without detaching its storage.
    void reset() {
        _top = _bottom;
        _deepest = _bottom;
    }

    inline bool is_allocated() const { return _bottom != NULL; }
    inline bool is_empty() const { return _top == _bottom; }

    // Once the cursor reaches the limit the stack has overflowed; push(),
    // top() and pop() all become no-ops until reset() is called.
    inline bool is_overflow() const { return _top == _limit; }

    // Push a block pointer; silently dropped when the stack is full.
    inline void push(void *block) {
        if (is_overflow()) return;
        *_top++ = block;
        if (_deepest < _top) _deepest = _top;
    }

    // Peek at the most recently pushed entry (NULL when empty or overflowed).
    inline void *top() {
        return (is_empty() || is_overflow()) ? NULL : _top[-1];
    }

    // Pop and return the most recent entry (NULL when empty or overflowed).
    inline void *pop() {
        return (is_empty() || is_overflow()) ? NULL : *--_top;
    }
};
// Collector phase, stored in Zone::_state (see Zone::set_state()/is_state()).
enum State {
    idle,           // no collection activity in progress
    scanning,       // reachable memory is being scanned
    enlivening,     // newly-stored pointers are being captured
    finalizing,     // finalizers are running (see Zone::is_thread_finalizing())
    reclaiming      // garbage memory is being recovered
};
//
// Zone — the collector's central object. Owns all collected memory (regions
// of small/medium subzones plus individually mapped large blocks), the root
// and association tables, the registered-thread list, and the state machine
// that coordinates mutators with the collector.
//
class Zone : public azone_t {
    friend class Monitor;
    friend class MemoryScanner;

  private:
    //
    // shared/static state
    //
    static bool _is_auto_initialized;       // one-time initialization flag — presumably set by setup_shared(); verify in the .cpp
    static Zone *_last_created;             // most recently constructed Zone, returned by zone()

    //
    // registered threads
    //
    pthread_key_t _registered_thread_key;   // TLS key holding the calling thread's Thread object
    Thread *_registered_threads;            // list of threads registered with the collector
    spin_lock_t _registered_threads_lock;   // protects _registered_threads
    pthread_key_t _thread_finalizing_key;   // TLS flag: this thread is currently running finalizers

    //
    // memory layout and block tracking
    //
    Bitmap _in_subzone;                     // one bit per subzone quantum that belongs to this zone
    Bitmap _in_large;                       // one bit per large quantum that starts a large block
    Large *_large_list;                     // list of large allocations (walked via Large::next())
    spin_lock_t _large_lock;                // protects _large_list and large deallocation
    PtrHashSet _roots;                      // registered root pointer locations
    spin_lock_t _roots_lock;                // protects _roots
    PtrHashSet _zombies;                    // addresses registered as zombies (see add_zombie())
    spin_lock_t _zombies_lock;              // protects _zombies
    Region *_region_list;                   // list of regions holding small/medium subzones
    spin_lock_t _region_lock;               // protects _region_list
    PtrIntHashMap _retains;                 // NOTE(review): presumably block -> retain count side table; verify against the refcount implementation
    spin_lock_t _retains_lock;              // protects _retains

    //
    // collection state
    //
    bool _is_partial;                       // current collection is partial (set by collect_begin())
    bool _repair_write_barrier;             // write barriers are being repaired — see mark_write_barriers_untouched()/clear_untouched_write_barriers()
    bool _use_pending;                      // use pending bits rather than the scan stack — TODO confirm against scanner
    ScanStack _scan_stack;                  // explicit stack of blocks/ranges awaiting scanning
    bool _some_pending;                     // true when at least one block has its pending bit set
    Range _coverage;                        // address range spanning all zone memory
    spin_lock_t _coverage_lock;             // protects _coverage
    volatile bool _needs_enlivening;        // while set, mutators must queue newly-visible pointers for the collector
    PointerList _enlivening_queue;          // pointers captured while _needs_enlivening is set
    spin_lock_t _enlivening_lock;           // protects the enlivening queue/flag
    Statistics _stats;                      // allocation and administrative statistics
    uint32_t _bytes_allocated;              // bytes allocated since clear_bytes_allocated()
    Monitor *_monitor;                      // attached monitor (may be NULL)
    PointerList _garbage_list;              // blocks determined to be garbage
    PtrAssocHashMap _associations;          // associative references: block -> (key -> value)
    spin_lock_t _associations_lock;         // protects _associations
    bool _scanning_associations;            // true while scan_associations() runs — TODO confirm in the .cpp
    volatile enum State _state;             // current collector phase

#if UseArena
    void *_arena;                           // base address of the fixed-size arena
    void *_large_start;                     // start of the large-allocation portion of the arena — TODO confirm
    Bitmap _large_bits;                     // allocation bitmap for arena large space — TODO confirm
    spin_lock_t _large_bits_lock;           // protects _large_bits
#endif

    // Large-block deallocation is indirected through this member so that
    // collect_begin() can switch to a deferring policy and collect_end()
    // can switch back (see deallocate_large_collecting/_normal).
    void (Zone::*_deallocate_large) (void *block);
    void deallocate_large_normal(void *block);
    void deallocate_large_collecting(void *block);

    // Allocate and link a new Region for small/medium blocks.
    Region *allocate_region();

    // Allocate a large block.
    void *allocate_large(const size_t size, const unsigned layout, bool clear, bool refcount_is_one);

    // Free a large block through the currently installed policy.
    void deallocate_large(void *block) {
        SpinLock lock(&_large_lock);
        (this->*_deallocate_large)(block);
    }

    // Map a block pointer to its Large header.
    inline Large *find_large(void *block) { return Large::large(block); }

    void *allocate_small_medium(const size_t size, const unsigned layout, bool clear, bool refcount_is_one);
    void deallocate_small_medium(void *block);

  public:
#if UseArena
    void *arena_allocate_region(usword_t newsize);
#endif
    void *arena_allocate_large(usword_t size);
    void arena_deallocate(void *, size_t size);

    // Offset of the first usable byte after the page-aligned Zone header.
    static inline const usword_t admin_offset() { return align(sizeof(Zone), page_size); }

    // Total bytes needed for the Zone object plus its trailing bitmaps;
    // used by operator new/delete below.
    static inline const usword_t bytes_needed() {
        usword_t in_subzone_size = Bitmap::bytes_needed(subzone_quantum_max);
        usword_t in_large_size = Bitmap::bytes_needed(allocate_quantum_large_max);
#if UseArena
        usword_t arena_size = Bitmap::bytes_needed(allocate_quantum_large_max);
#else
        usword_t arena_size = 0;
#endif
        return admin_offset() + in_subzone_size + in_large_size + arena_size;
    }

    // Zones allocate their own guarded storage rather than using malloc.
    inline void *operator new(const size_t size) {
        void *allocation_address = allocate_guarded_memory(bytes_needed());
        if (!allocation_address) error("Can not allocate zone");
        return allocation_address;
    }

    inline void operator delete(void *zone) {
        if (zone) deallocate_guarded_memory(zone, bytes_needed());
    }

    static void setup_shared();
    Zone();
    ~Zone();

    //
    // simple accessors
    //
    static inline Zone *zone() { return _last_created; }
    inline Thread *threads() { return _registered_threads; }
    inline spin_lock_t *threads_lock() { return &_registered_threads_lock; }
    inline Region *region_list() { return _region_list; }
    inline Large *large_list() { return _large_list; }
    inline spin_lock_t *large_lock() { return &_large_lock; }
    inline pthread_key_t registered_thread_key() const { return _registered_thread_key; }
    inline Statistics &statistics() { return _stats; }
    inline Range &coverage() { return _coverage; }
    inline Monitor *monitor() { return _monitor; }
    inline void set_monitor(Monitor *monitor) { _monitor = monitor; }
    inline PointerList &garbage_list() { return _garbage_list; }
    inline bool volatile *needs_enlivening() { return &_needs_enlivening; }
    inline spin_lock_t *enlivening_lock() { return &_enlivening_lock; }
    inline PointerList &enlivening_queue() { return _enlivening_queue; }
    inline ScanStack &scan_stack() { return _scan_stack; }
    inline void set_state(enum State ns) { _state = ns; }
    inline bool is_state(enum State ns) { return _state == ns; }
    inline spin_lock_t *associations_lock() { return &_associations_lock; }
#if UseArena
    inline void * arena() { return _arena; }
#else
    inline void * arena() { return (void *)0; }
#endif

    inline uint32_t bytes_allocated() const { return _bytes_allocated; }
    inline void clear_bytes_allocated() { _bytes_allocated = 0; }
    inline void add_allocated_bytes(usword_t n) { _bytes_allocated += n; }

    // Index of the subzone containing <address>, relative to the arena base.
    static inline const usword_t subzone_index(void *address) { return (((usword_t)address & mask(arena_size_log2)) >> subzone_quantum_log2); }

    // Number of subzone quanta needed to hold <size> bytes.
    static inline const usword_t subzone_count(const size_t size) { return partition2(size, subzone_quantum_log2); }

    // Mark a subzone as belonging to the zone (atomic: allocators may race).
    inline void activate_subzone(Subzone *subzone) { _in_subzone.set_bit_atomic(subzone_index(subzone)); }

    // Does <address> fall inside this zone's arena? (Always true when the
    // arena is the whole address space.)
    inline bool address_in_arena(void *address) const {
#if UseArena
        return ((usword_t)address & ~mask(arena_size_log2)) == (usword_t)_arena;
#else
        return true;
#endif
    }

    // Membership tests for the two kinds of zone memory.
    inline const bool in_subzone_memory(void *address) const { return address_in_arena(address) && (bool)_in_subzone.bit(subzone_index(address)); }
    inline const bool in_large_memory(void *address) const { return address_in_arena(address) && (bool)_in_large.bit(Large::quantum_index(address)); }
    inline const bool in_zone_memory(void *address) const { return in_subzone_memory(address) || in_large_memory(address); }

    // Round a requested size up to the quantum it will actually occupy.
    // NOTE(review): sizes <= allocate_quantum_large round to the medium
    // quantum and larger sizes to the small quantum — confirm this threshold
    // and pairing against the allocator implementation.
    static inline const usword_t good_block_size(usword_t size) {
        if (size <= allocate_quantum_large) return align2(size, allocate_quantum_medium_log2);
        return align2(size, allocate_quantum_small_log2);
    }

    // Is <address> the start of an allocated block in this zone?
    inline bool is_block(void *address) {
        return _coverage.in_range(address) && block_is_start(address);
    }

    void *block_allocate(const size_t size, const unsigned layout, const bool clear, bool refcount_is_one);
    void block_deallocate(void *block);
    void block_deallocate_internal(void *block);

    // True when <address> is the first byte of a subzone or large block.
    inline bool block_is_start(void *address) {
        if (in_subzone_memory(address)) {
            return Subzone::subzone(address)->is_start(address);
        } else if (in_large_memory(address)) {
            return Large::is_start(address);
        }
        return false;
    }

    void *block_start_large(void *address);
    void *block_start(void *address);
    usword_t block_size(void *block);
    int block_layout(void *block);
    void block_set_layout(void *block, int layout);

  private:
    int get_refcount_small_medium(Subzone *subzone, void *block);
    int inc_refcount_small_medium(Subzone *subzone, void *block);
    int dec_refcount_small_medium(Subzone *subzone, void *block);

    // Acquire every allocation lock (regions, large list, enlivening) so the
    // heap cannot change underneath the caller. The order here must be the
    // exact reverse of open_locks() to avoid deadlock.
    inline void close_locks() {
        spin_lock(&_region_lock);
        for (Region *region = _region_list; region != NULL; region = region->next()) {
            region->lock();
        }
        spin_lock(&_large_lock);
        spin_lock(&_enlivening_lock);
    }

    // Release everything taken by close_locks(), in reverse order.
    inline void open_locks() {
        spin_unlock(&_enlivening_lock);
        spin_unlock(&_large_lock);
        for (Region *region = _region_list; region != NULL; region = region->next()) {
            region->unlock();
        }
        spin_unlock(&_region_lock);
    }

  public:
    int block_refcount(void *block);
    int block_increment_refcount(void *block);
    int block_decrement_refcount(void *block);
    void block_refcount_and_layout(void *block, int *refcount, int *layout);

    // Forward the "new" (recently allocated) test to the owning allocator.
    inline bool block_is_new(void *block) {
        if (in_subzone_memory(block)) {
            return Subzone::subzone(block)->is_new(block);
        } else if (in_large_memory(block)) {
            return Large::is_new(block);
        }
        return false;
    }

    // A block is garbage when it is neither marked nor newest; only
    // meaningful after a scan, before marks are reset.
    inline bool block_is_garbage(void *block) {
        if (in_subzone_memory(block)) {
            Subzone *subzone = Subzone::subzone(block);
            return !subzone->is_marked(block) && !subzone->is_newest(block);
        } else if (in_large_memory(block)) {
            Large *large = Large::large(block);
            return !large->is_marked() && !large->is_newest();
        }
        return false;
    }

    // Was the block marked (reached) by the current/most recent scan?
    inline bool block_is_marked(void *block) {
        if (in_subzone_memory(block)) {
            Subzone *subzone = Subzone::subzone(block);
            return subzone->is_marked(block);
        } else if (in_large_memory(block)) {
            Large *large = Large::large(block);
            return large->is_marked();
        }
        return false;
    }

    //
    // associative references
    //

    // Store (or, when value is NULL, remove) block[key] = value. While the
    // enlivening barrier is active, the new value is queued so the collector
    // will not miss it.
    void set_associative_ref(void *block, void *key, void *value) {
        if (value) {
            UnconditionalBarrier barrier(&_needs_enlivening, &_enlivening_lock);
            SpinLock lock(&_associations_lock);
            _associations[block][key] = value;
            if (barrier) _enlivening_queue.add(value);
        } else {
            SpinLock lock(&_associations_lock);
            PtrPtrHashMap &refs = _associations[block];
            PtrPtrHashMap::iterator i = refs.find(key);
            if (i != refs.end()) refs.erase(i);
        }
    }

    // Look up block[key]; NULL when no association exists.
    void *get_associative_ref(void *block, void *key) {
        SpinLock lock(&_associations_lock);
        PtrAssocHashMap::iterator i = _associations.find(block);
        if (i != _associations.end()) {
            PtrPtrHashMap &refs = i->second;
            PtrPtrHashMap::iterator j = refs.find(key);
            if (j != refs.end()) return j->second;
        }
        return NULL;
    }

    void scan_associations(MemoryScanner &scanner);

    // Mark every value associated with <block> as pending (to be scanned).
    // NOTE(review): caller appears responsible for holding _associations_lock
    // — unlike the other association methods, none is taken here; verify.
    void pend_associations(void *block) {
        PtrAssocHashMap::iterator i = _associations.find(block);
        if (i != _associations.end()) {
            PtrPtrHashMap &refs = i->second;
            for (PtrPtrHashMap::iterator j = refs.begin(); j != refs.end(); j++) {
                set_pending(j->second);
            }
        }
    }

    // Drop all associations keyed by <block> (e.g. when it is freed).
    void erase_associations(void *block) {
        SpinLock lock(&_associations_lock);
        if (_associations.size() == 0) return;
        PtrAssocHashMap::iterator iter = _associations.find(block);
        if (iter != _associations.end()) _associations.erase(iter);
    }

    //
    // roots
    //

    // Register <root> as a root location and store <value> through it,
    // enlivening the value if a collection is in its enlivening window.
    inline void add_root(void *root, void *value) {
        UnconditionalBarrier barrier(&_needs_enlivening, &_enlivening_lock);
        SpinLock lock(&_roots_lock);
        if (_roots.find(root) == _roots.end()) {
            _roots.insert(root);
        }
        if (barrier && !block_is_marked(value)) _enlivening_queue.add(value);
        *(void **)root = value;
    }

    // Register a root location without writing through it.
    inline void add_root_no_barrier(void *root) {
        SpinLock lock(&_roots_lock);
        if (_roots.find(root) == _roots.end()) {
            _roots.insert(root);
        }
    }

    // Snapshot the current root set into <list>.
    inline void copy_roots(PointerList &list) {
        SpinLock lock(&_roots_lock);
        usword_t count = _roots.size();
        list.clear_count();
        list.grow(count);
        list.set_count(count);
        std::copy(_roots.begin(), _roots.end(), (void**)list.buffer());
    }

    inline void remove_root(void *root) {
        SpinLock lock(&_roots_lock);
        PtrHashSet::iterator iter = _roots.find(root);
        if (iter != _roots.end()) {
            _roots.erase(iter);
        }
    }

    inline bool is_root(void *address) {
        SpinLock lock(&_roots_lock);
        PtrHashSet::iterator iter = _roots.find(address);
        return (iter != _roots.end());
    }

    //
    // zombies
    //
    inline void add_zombie(void *address) {
        SpinLock lock(&_zombies_lock);
        if (_zombies.find(address) == _zombies.end()) {
            _zombies.insert(address);
        }
    }

    inline bool is_zombie(void *address) {
        SpinLock lock(&_zombies_lock);
        PtrHashSet::iterator iter = _zombies.find(address);
        return (iter != _zombies.end());
    }

    inline void clear_zombies() {
        SpinLock lock(&_zombies_lock);
        _zombies.clear();
    }

    //
    // scanning support
    //
    bool set_pending(void *block);
    void repend(void *address);
    bool set_write_barrier(void *address, void *value);
    bool set_write_barrier_range(void *address, const usword_t size);
    bool set_write_barrier(void *address);
    void write_barrier_scan_unmarked_content(void *block, const usword_t size, MemoryScanner &scanner);
    void mark_write_barriers_untouched();
    void clear_untouched_write_barriers();
    void clear_all_write_barriers();
    void reset_all_marks();
    void reset_all_marks_and_pending();
    void statistics(Statistics &stats);

    void scan_stack_push_block(void *block) {
        _scan_stack.push(block);
    }

    // A range is encoded as two pushes: the end address, then the start
    // address displaced by +1 so its low bit tags the pair as a range (see
    // scan_stack_is_range() and scan_stack_pop_range()).
    void scan_stack_push_range(Range &range) {
        _scan_stack.push(range.end());
        _scan_stack.push(displace(range.address(), 1));
    }

    // Overflow counts as empty: the stack's contents are unusable then.
    bool scan_stack_is_empty() { return _scan_stack.is_empty() || _scan_stack.is_overflow(); }

    // Does the top of the stack carry the range tag bit?
    bool scan_stack_is_range() {
        void *block = _scan_stack.top();
        return !is_bit_aligned(block, 1);
    }

    void *scan_stack_pop_block() {
        return _scan_stack.pop();
    }

    // Pop a tagged range: first the displaced start (undo the +1), then the end.
    Range scan_stack_pop_range() {
        void *block1 = _scan_stack.pop();
        void *block2 = _scan_stack.pop();
        return Range(displace(block1, -1), block2);
    }

    //
    // collection flags
    //
    inline bool repair_write_barrier() const { return _repair_write_barrier; }
    inline bool is_some_pending () const { return _some_pending; }
    inline void set_some_pending () { _some_pending = true; }
    inline void clear_some_pending () { _some_pending = false; }
    inline bool use_pending () const { return _use_pending; }
    inline void set_use_pending () { _use_pending = true; }
    inline void clear_use_pending () { _use_pending = false; }

    // Raise the enlivening flag with the world's allocation locks held, so
    // no mutator can observe a half-set barrier.
    inline void set_needs_enlivening() {
        close_locks();
        _needs_enlivening = true;
        open_locks();
    }

    inline void clear_needs_enlivening() {
        _needs_enlivening = false;
    }

    // Enter collection: defer large-block frees until collect_end().
    inline void collect_begin(bool is_partial) {
        SpinLock lock(&_large_lock);
        _deallocate_large = &Zone::deallocate_large_collecting;
        _is_partial = is_partial;
    }

    // Leave collection: clear marks, actually free any large blocks whose
    // deallocation was deferred, restore the normal free policy, and
    // decommit the garbage list's buffer.
    inline void collect_end() {
        reset_all_marks();
        _is_partial = false;
        SpinLock lock(&_large_lock);
        Large *large = _large_list;
        while (large != NULL) {
            Large *next = large->next();
            if (large->is_freed()) deallocate_large_normal(large->address());
            large = next;
        }
        _deallocate_large = &Zone::deallocate_large_normal;
        _garbage_list.uncommit();
    }

    void block_collector();
    void unblock_collector();
    void collect(bool is_partial, void *current_stack_bottom, auto_date_t *scan_end);
    void scavenge_blocks();

    // Intentionally a no-op in this implementation.
    void release_pages() {
    }

    //
    // thread management
    //
    void register_thread();
    void unregister_thread();
    void suspend_all_registered_threads();
    void resume_all_registered_threads();

    // Record in TLS whether the calling thread is running finalizers.
    void set_thread_finalizing(bool is_finalizing) {
        pthread_setspecific(_thread_finalizing_key, (void*)is_finalizing);
    }

    // True only while the zone is in the finalizing phase AND this thread's
    // TLS flag is set.
    bool is_thread_finalizing() { return is_state(finalizing) && (bool)pthread_getspecific(_thread_finalizing_key); }

    // Nonzero when any weak references exist (num_weak_refs inherited from azone_t).
    unsigned has_weak_references() { return (num_weak_refs != 0); }

    // Client-supplied layout callbacks (from the inherited control block);
    // NULL when the client registered none.
    const unsigned char *layout_map_for_block(void *block) {
        return control.layout_for_address ? control.layout_for_address((auto_zone_t *)this, block) : NULL;
    }

    const unsigned char *weak_layout_map_for_block(void *block) {
        return control.weak_layout_for_address ? control.weak_layout_for_address((auto_zone_t *)this, block) : NULL;
    }

    //
    // debugging
    //
    void print_all_blocks();
    void print_block(void *block);
    void print_block(void *block, const char *tag);
};
};
#endif // __AUTO_ZONE_CORE__