
Commit 12a6f5bf authored by qinsoon

[wip] keep working on treadmill allocator

make tracing able to trace the freelist space (may have degraded
performance; GC performance will need tuning later)
parent c95de6d9
Pipeline #226 failed in 25 minutes and 30 seconds
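For context before the diff: a treadmill allocator keeps every block on one cyclic doubly-linked list, partitioned by the free/scan/top/bottom pointers, and allocation simply recolors nodes starting at free. A minimal, self-contained sketch of that allocation step (editor's illustration, not code from this commit; it uses Vec indices instead of the raw-pointer nodes in treadmill.rs below, and only two of the four colors):

#[derive(Clone, Copy, PartialEq, Debug)]
enum Color { Ecru, Black } // the commit also defines White and Grey

struct Node { color: Color, next: usize }

struct Toymill { nodes: Vec<Node>, free: usize }

impl Toymill {
    fn new(n_blocks: usize) -> Toymill {
        // one node per block, linked into a cycle
        let nodes = (0..n_blocks)
            .map(|i| Node { color: Color::Ecru, next: (i + 1) % n_blocks })
            .collect();
        Toymill { nodes: nodes, free: 0 }
    }

    // Walk n nodes from `free`; if they are all still available (Ecru),
    // recolor them Black and advance `free`, otherwise report failure.
    fn alloc_blocks(&mut self, n: usize) -> Option<usize> {
        let mut cur = self.free;
        for _ in 0..n {
            if self.nodes[cur].color != Color::Ecru { return None; }
            cur = self.nodes[cur].next;
        }
        let first = self.free;
        let mut mark = self.free;
        for _ in 0..n {
            self.nodes[mark].color = Color::Black;
            mark = self.nodes[mark].next;
        }
        self.free = cur;
        Some(first)
    }
}

fn main() {
    let mut tm = Toymill::new(4);
    assert_eq!(tm.alloc_blocks(2), Some(0)); // blocks 0 and 1
    assert_eq!(tm.alloc_blocks(3), None);    // only 2 ecru blocks remain
}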
@@ -4,10 +4,6 @@ version = "0.0.1"
authors = ["qinsoon <qinsoon@gmail.com>"]
build = "build.rs"
-[features]
-default = ["parallel-gc"]
-parallel-gc = []
[lib]
crate-type = ["rlib"]
@@ -17,6 +13,7 @@ gcc = "0.3"
[dependencies]
utils = {path = "../utils"}
time = "0.1.34"
lazy_static = "0.1.15" lazy_static = "0.1.15"
log = "0.3.5" log = "0.3.5"
simple_logger = "0.4.0" simple_logger = "0.4.0"
......
@@ -31,9 +31,9 @@ impl FreeListSpace {
}
-pub fn alloc(&mut self, size: usize, align: usize) -> Option<Address> {
+pub fn alloc(&mut self, size: usize, align: usize) -> Address {
if self.used_bytes + size > self.size {
-None
+unsafe {Address::zero()}
} else {
let ret = aligned_alloc::aligned_alloc(size, align);
@@ -43,7 +43,7 @@ impl FreeListSpace {
self.node_id += 1;
self.used_bytes += size;
-Some(addr)
+addr
}
}
@@ -106,27 +106,6 @@ pub enum NodeMark {
}
unsafe impl Sync for NodeMark {}
-#[inline(never)]
-pub fn alloc_large(size: usize, align: usize, mutator: &mut immix::ImmixMutatorLocal, space: Arc<RwLock<FreeListSpace>>) -> Address {
-loop {
-mutator.yieldpoint();
-let ret_addr = {
-let mut lo_space_lock = space.write().unwrap();
-lo_space_lock.alloc(size, align)
-};
-match ret_addr {
-Some(addr) => {
-return addr;
-},
-None => {
-gc::trigger_gc();
-}
-}
-}
-}
use std::fmt;
impl fmt::Display for FreeListSpace {
......
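Both hunks above change FreeListSpace::alloc from returning Option<Address> to returning a bare Address, with the zero address as the failure sentinel; alloc_large, deleted here, reappears in freelist/mod.rs below as the caller that checks that sentinel and triggers a GC. A stand-in sketch of the convention (editor's illustration; this Address is a toy type, not utils::Address):

#[derive(Clone, Copy, PartialEq, Debug)]
struct Address(usize);

impl Address {
    unsafe fn zero() -> Address { Address(0) }  // mirrors utils::Address::zero()
    fn is_zero(&self) -> bool { self.0 == 0 }
}

// Stand-in allocator: hand out fake addresses until the budget runs out.
fn alloc(budget: &mut usize, size: usize) -> Address {
    if *budget < size {
        unsafe { Address::zero() }               // was: None
    } else {
        *budget -= size;
        Address(0x10000 + *budget)               // was: Some(addr)
    }
}

fn main() {
    let mut budget = 8192;
    let a = alloc(&mut budget, 4096);
    assert!(!a.is_zero());
    let b = alloc(&mut budget, 8192);
    assert!(b.is_zero()); // caller would trigger_gc() and retry, as alloc_large does
}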
mod malloc_list;
mod treadmill;
-pub use heap::freelist::malloc_list::FreeListSpace;
-pub use heap::freelist::malloc_list::alloc_large;
+//pub use heap::freelist::malloc_list::FreeListSpace;
+pub use heap::freelist::treadmill::FreeListSpace;
\ No newline at end of file
use std::sync::Arc;
use std::sync::RwLock;
use heap::gc;
use utils::{Address, ObjectReference};
use heap::immix;
#[inline(never)]
pub fn alloc_large(size: usize, align: usize, mutator: &mut immix::ImmixMutatorLocal, space: Arc<FreeListSpace>) -> Address {
loop {
mutator.yieldpoint();
let ret_addr = space.alloc(size, align);
if ret_addr.is_zero() {
gc::trigger_gc();
} else {
return ret_addr;
}
}
}
\ No newline at end of file
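Note the locking change in this new alloc_large: the space is shared as a plain Arc<FreeListSpace> and allocation takes &self, because treadmill.rs below serializes access with an internal Mutex rather than requiring callers to hold an RwLock over the whole space. A minimal sketch of that pattern with stand-in types (editor's illustration, not the crate's API):

use std::sync::{Arc, Mutex};
use std::thread;

// Stand-in for FreeListSpace: interior locking lets alloc take &self,
// so the space can be shared as Arc<Space> without an outer RwLock.
struct Space { treadmill: Mutex<Vec<usize>> }

impl Space {
    fn alloc(&self, block: usize) {
        self.treadmill.lock().unwrap().push(block);
    }
}

fn main() {
    let space = Arc::new(Space { treadmill: Mutex::new(Vec::new()) });
    let handles: Vec<_> = (0..4).map(|i| {
        let s = space.clone();
        thread::spawn(move || s.alloc(i))
    }).collect();
    for h in handles { h.join().unwrap(); }
    assert_eq!(space.treadmill.lock().unwrap().len(), 4);
}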
@@ -2,9 +2,12 @@
use utils::Address;
use utils::mem::memmap;
+use common::AddressMap;
use std::ptr;
use std::sync::Arc;
-use common::AddressMap;
+use std::fmt;
+use std::sync::Mutex;
const SPACE_ALIGN : usize = 1 << 19;
const BLOCK_SIZE : usize = 1 << 12; // 4kb
@@ -20,7 +23,7 @@ pub struct FreeListSpace {
#[allow(dead_code)]
mmap : memmap::Mmap,
-treadmill: TreadMill
+treadmill: Mutex<Treadmill>
}
impl FreeListSpace {
@@ -32,44 +35,202 @@ impl FreeListSpace {
let start : Address = Address::from_ptr::<u8>(anon_mmap.ptr()).align_up(SPACE_ALIGN);
let end : Address = start.plus(space_size);
let trace_map = AddressMap::new(start, end);
let alloc_map = AddressMap::new(start, end);
if cfg!(debug_assertions) {
trace_map.init_all(0);
alloc_map.init_all(0);
}
let treadmill = Treadmill::new(start, end);
FreeListSpace {
start: start,
end: end,
alloc_map: Arc::new(alloc_map),
trace_map: Arc::new(trace_map),
mmap: anon_mmap,
treadmill: Mutex::new(treadmill)
}
}
pub fn alloc(&self, size: usize, align: usize) -> Address {
// every block is 'BLOCK_SIZE' aligned, usually we do not need to align
assert!(BLOCK_SIZE % align == 0);
let blocks_needed = if size % BLOCK_SIZE == 0 {
size / BLOCK_SIZE
} else {
size / BLOCK_SIZE + 1
};
trace!("requiring {} bytes ({} blocks)", size, blocks_needed);
let mut treadmill = self.treadmill.lock().unwrap();
let res = treadmill.alloc_blocks(blocks_needed);
res
}
pub fn sweep(&self) {
let mut treadmill = self.treadmill.lock().unwrap();
unimplemented!()
}
}
-struct TreadMill{
-free: *mut TreadMillNode,
-scan: *mut TreadMillNode,
-t : *mut TreadMillNode,
-b : *mut TreadMillNode
-}
-impl TreadMill {
-fn new(start: Address, end: Address) -> TreadMill {
+use heap::Space;
+impl Space for FreeListSpace {
+#[inline(always)]
+fn start(&self) -> Address {
+self.start
+}
+#[inline(always)]
+fn end(&self) -> Address {
+self.end
+}
+#[inline(always)]
+fn alloc_map(&self) -> *mut u8 {
+self.alloc_map.ptr
+}
+#[inline(always)]
+fn trace_map(&self) -> *mut u8 {
+self.trace_map.ptr
+}
+}
+unsafe impl Sync for FreeListSpace {}
+unsafe impl Send for FreeListSpace {}
+impl fmt::Display for FreeListSpace {
+fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+write!(f, "FreeListSpace\n").unwrap();
+write!(f, "range={:#X} ~ {:#X}\n", self.start, self.end).unwrap();
+let treadmill : &Treadmill = &self.treadmill.lock().unwrap();
+write!(f, "treadmill: {}", treadmill)
+}
+}
+struct Treadmill{
+available_color: TreadmillNodeColor,
+free: *mut TreadmillNode,
+scan: *mut TreadmillNode,
+t : *mut TreadmillNode,
+b : *mut TreadmillNode
+}
+impl Treadmill {
+fn new(start: Address, end: Address) -> Treadmill {
let mut addr = start;
-let free = TreadMillNode::singleton(addr);
+let free = TreadmillNode::singleton(addr);
addr = addr.plus(BLOCK_SIZE);
let mut tail = free;
while addr < end {
-tail = TreadMillNode::insert_after(tail, addr);
+tail = unsafe {(&mut *tail)}.insert_after(addr);
addr = addr.plus(BLOCK_SIZE);
}
-unimplemented!()
+Treadmill {
available_color: TreadmillNodeColor::Ecru,
free: free,
scan: free,
t: free,
b: free
}
}
fn alloc_blocks(&mut self, n_blocks: usize) -> Address {
// check if we have n_blocks available
let mut cur = self.free;
for _ in 0..n_blocks {
if unsafe{&*cur}.color != self.available_color {
return unsafe {Address::zero()};
}
cur = unsafe {&*cur}.next;
}
// we make sure that n_blocks are available, mark them as black
let mut cur2 = self.free;
for _ in 0..n_blocks {
unsafe{&mut *cur2}.color = TreadmillNodeColor::Black;
cur2 = unsafe {&*cur2}.next
}
let ret = self.free;
self.free = cur;
unsafe{&*ret}.payload
}
}
impl fmt::Display for Treadmill {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let mut cursor = self.free;
loop {
write!(f, "{}", unsafe{&*cursor}).unwrap();
if cursor == self.free {
write!(f, "(free)").unwrap();
}
if cursor == self.scan {
write!(f, "(scan)").unwrap();
}
if cursor == self.b {
write!(f, "(bottom)").unwrap();
}
if cursor == self.t {
write!(f, "(top)").unwrap();
}
if unsafe{&*cursor}.next() == self.free {
break;
} else {
write!(f, "->").unwrap();
cursor = unsafe{&*cursor}.next();
}
}
Ok(())
}
}
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
enum TreadmillNodeColor {
Ecru,
White,
Black,
Grey
}
-struct TreadMillNode {
-payload: Address,
-prev: *mut TreadMillNode,
-next: *mut TreadMillNode
-}
-impl TreadMillNode {
-fn singleton(addr: Address) -> *mut TreadMillNode {
-let mut ptr = Box::into_raw(Box::new(TreadMillNode {
+struct TreadmillNode {
+payload: Address,
+color: TreadmillNodeColor,
+prev: *mut TreadmillNode,
+next: *mut TreadmillNode
+}
+impl fmt::Display for TreadmillNode {
+fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+write!(f, "[{}-{:?}]", self.payload, self.color)
}
}
impl TreadmillNode {
fn singleton(addr: Address) -> *mut TreadmillNode {
let mut ptr = Box::into_raw(Box::new(TreadmillNode {
payload: addr,
color: TreadmillNodeColor::Ecru,
prev: ptr::null_mut(),
next: ptr::null_mut(),
}));
@@ -84,23 +245,75 @@ impl TreadMillNode {
}
/// returns the inserted node
-fn insert_after(node: *mut TreadMillNode, addr: Address) -> *mut TreadMillNode {
+fn insert_after(&mut self, addr: Address) -> *mut TreadmillNode {
unsafe {
// node <- ptr -> node.next
-let mut ptr = Box::into_raw(Box::new(TreadMillNode {
+let mut ptr = Box::into_raw(Box::new(TreadmillNode {
payload: addr,
color: TreadmillNodeColor::Ecru,
// inserted between node and node.next
-prev: node,
-next: (*node).next
+prev: self as *mut TreadmillNode,
+next: self.next
}));
// ptr <- node.next
-(*(*node).next).prev = ptr;
+unsafe{(&mut *self.next)}.prev = ptr;
// node -> ptr
-(*node).next = ptr;
+self.next = ptr;
ptr
}
}
fn next(&self) -> *mut TreadmillNode {
self.next
}
fn prev(&self) -> *mut TreadmillNode {
self.prev
}
}
#[cfg(test)]
mod tests {
use super::*;
use super::BLOCK_SIZE;
#[test]
fn test_new_treadmill_space() {
let space = FreeListSpace::new(BLOCK_SIZE * 10);
println!("{}", space);
}
#[test]
fn test_treadmill_alloc() {
let mut space = FreeListSpace::new(BLOCK_SIZE * 10);
for i in 0..10 {
let ret = space.alloc(BLOCK_SIZE, 8);
println!("Allocation{}: {}", i, ret);
}
}
#[test]
fn test_treadmill_alloc_spanblock() {
let mut space = FreeListSpace::new(BLOCK_SIZE * 10);
for i in 0..5 {
let ret = space.alloc(BLOCK_SIZE * 2, 8);
println!("Allocation{}: {}", i, ret);
}
}
#[test]
fn test_treadmill_alloc_exhaust() {
let mut space = FreeListSpace::new(BLOCK_SIZE * 10);
for i in 0..20 {
let ret = space.alloc(BLOCK_SIZE, 8);
println!("Allocation{}: {}", i, ret);
}
}
}
\ No newline at end of file
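As a quick check of the arithmetic in FreeListSpace::alloc above: request sizes are rounded up to whole 4 KB blocks, so the 10-block space used by these tests serves ten 1-block or five 2-block allocations, after which alloc hands back the zero-address sentinel (which is what test_treadmill_alloc_exhaust exercises). Editor's sketch of just the rounding rule:

const BLOCK_SIZE: usize = 1 << 12; // 4 KB, matching the constant above

fn blocks_needed(size: usize) -> usize {
    if size % BLOCK_SIZE == 0 { size / BLOCK_SIZE } else { size / BLOCK_SIZE + 1 }
}

fn main() {
    assert_eq!(blocks_needed(BLOCK_SIZE), 1);
    assert_eq!(blocks_needed(BLOCK_SIZE * 2), 2);
    assert_eq!(blocks_needed(BLOCK_SIZE + 1), 2); // rounds up to whole blocks
    // a 10-block space therefore exhausts after the first 10 single-block
    // allocations in test_treadmill_alloc_exhaust
}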
@@ -73,16 +73,16 @@ impl LineMarkTable {
self.set(line_table_index + 1, immix::LineMark::ConservLive);
}
}
#[inline(always)]
pub fn mark_line_live2(&self, space_start: Address, addr: Address) {
let line_table_index = addr.diff(space_start) >> immix::LOG_BYTES_IN_LINE;
self.set(line_table_index, immix::LineMark::Live);
if line_table_index < self.len - 1 {
self.set(line_table_index + 1, immix::LineMark::ConservLive);
}
}
}
@@ -231,7 +231,9 @@ impl ImmixSpace {
let mut full_blocks = 0;
let mut used_blocks_lock = self.used_blocks.lock().unwrap();
let mut usable_blocks_lock = self.usable_blocks.lock().unwrap();
usable_blocks = usable_blocks_lock.len();
let mut live_blocks : LinkedList<Box<ImmixBlock>> = LinkedList::new();
@@ -299,6 +301,29 @@ impl ImmixSpace {
}
}
use heap::Space;
impl Space for ImmixSpace {
#[inline(always)]
fn start(&self) -> Address {
self.start
}
#[inline(always)]
fn end(&self) -> Address {
self.end
}
#[inline(always)]
fn alloc_map(&self) -> *mut u8 {
self.alloc_map.ptr
}
#[inline(always)]
fn trace_map(&self) -> *mut u8 {
self.trace_map.ptr
}
}
impl ImmixBlock {
pub fn get_next_available_line(&self, cur_line : usize) -> Option<usize> {
let mut i = cur_line;
......
use utils::Address;
use utils::bit_utils;
use utils::POINTER_SIZE;
use utils::LOG_POINTER_SIZE;
use std::sync::atomic::AtomicUsize;
use objectmodel;
pub mod immix;
pub mod freelist;
pub mod gc;
@@ -16,6 +21,45 @@ lazy_static! {
pub static ref LO_SPACE_SIZE : AtomicUsize = AtomicUsize::new( (DEFAULT_HEAP_SIZE as f64 * LO_SPACE_RATIO) as usize );
}
pub trait Space {
#[inline(always)]
fn start(&self) -> Address;
#[inline(always)]
fn end(&self) -> Address;
#[inline(always)]
fn alloc_map(&self) -> *mut u8;
#[inline(always)]
fn trace_map(&self) -> *mut u8;
#[inline(always)]
fn is_valid_object(&self, addr: Address) -> bool {
let start = self.start();
let end = self.end();
if addr >= end || addr < start {
return false;
}
let index = (addr.diff(start) >> LOG_POINTER_SIZE) as isize;
if !bit_utils::test_nth_bit(unsafe {*self.alloc_map().offset(index)}, objectmodel::OBJ_START_BIT) {
return false;
}
if !addr.is_aligned_to(POINTER_SIZE) {
return false;
}
true
}
#[inline(always)]
fn addr_in_space(&self, addr: Address) -> bool {
addr >= self.start() && addr < self.end()
}
}
#[inline(always)]
pub fn fill_alignment_gap(start : Address, end : Address) -> () {
debug_assert!(end >= start);
......
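This new Space trait is what the commit message refers to: exposing start/end plus the alloc and trace maps behind one interface lets the tracer ask either the immix space or the freelist space whether a candidate word is a valid object. A minimal sketch of that dispatch with stand-in types (editor's illustration; the real is_valid_object also consults the alloc map, as shown above):

trait Space {
    fn addr_in_space(&self, addr: usize) -> bool;
    fn is_valid_object(&self, addr: usize) -> bool;
}

struct ToySpace { start: usize, end: usize }

impl Space for ToySpace {
    fn addr_in_space(&self, addr: usize) -> bool {
        addr >= self.start && addr < self.end
    }
    fn is_valid_object(&self, addr: usize) -> bool {
        // stand-in check: in range and pointer-aligned
        self.addr_in_space(addr) && addr % 8 == 0
    }
}

// The tracer only needs to know which space the address falls in.
fn is_traceable(addr: usize, immix: &ToySpace, lo: &ToySpace) -> bool {
    (immix.addr_in_space(addr) && immix.is_valid_object(addr))
        || (lo.addr_in_space(addr) && lo.is_valid_object(addr))
}

fn main() {
    let immix = ToySpace { start: 0x1000, end: 0x2000 };
    let lo    = ToySpace { start: 0x8000, end: 0x9000 };
    assert!(is_traceable(0x1008, &immix, &lo));
    assert!(is_traceable(0x8010, &immix, &lo));
    assert!(!is_traceable(0x4000, &immix, &lo)); // in neither space
}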
@@ -33,16 +33,15 @@ pub use heap::immix::LIMIT_OFFSET as ALLOCATOR_LIMIT_OFFSET;
#[repr(C)]
pub struct GC {
immix_space: Arc<ImmixSpace>,
-lo_space : Arc<RwLock<FreeListSpace>>
+lo_space : Arc<FreeListSpace>
}
impl fmt::Debug for GC {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "GC\n").unwrap();
write!(f, "{}", self.immix_space).unwrap();
-let lo_lock = self.lo_space.read().unwrap();
-write!(f, "{}", *lo_lock)
+write!(f, "{}", self.lo_space)
}
}
@@ -56,7 +55,7 @@ pub extern fn gc_stats() {
}
#[no_mangle]
-pub extern fn get_spaces() -> (Arc<ImmixSpace>, Arc<RwLock<FreeListSpace>>) {
+pub extern fn get_spaces() -> (Arc<ImmixSpace>, Arc<FreeListSpace>) {
let space_lock = MY_GC.read().unwrap();
let space = space_lock.as_ref().unwrap();
@@ -74,7 +73,7 @@ pub extern fn gc_init(immix_size: usize, lo_size: usize, n_gcthreads: usize) {
let (immix_space, lo_space) = {
let immix_space = Arc::new(ImmixSpace::new(immix_size));
-let lo_space = Arc::new(RwLock::new(FreeListSpace::new(lo_size)));
+let lo_space = Arc::new(FreeListSpace::new(lo_size));
heap::gc::init(immix_space.clone(), lo_space.clone());
......
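With lo_space now an Arc<FreeListSpace>, a client initializes the collector the same way as before; a hypothetical sketch using only the signatures shown in this hunk (gc_init and get_spaces), assuming the gc crate exports them at its root as this lib.rs suggests, with illustrative sizes:

extern crate gc;

fn main() {
    // gc_init(immix_size, lo_size, n_gcthreads)
    gc::gc_init(500 << 20, 100 << 20, 8);
    let (immix_space, lo_space) = gc::get_spaces();
    // both spaces implement Display in this commit
    println!("{}", immix_space);
    println!("{}", lo_space);
}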
mod test_gc_harness;
mod test_gcbench;
\ No newline at end of file
extern crate gc;
extern crate utils;
extern crate simple_logger;
extern crate log;
use self::log::LogLevel;
use self::gc::heap;
use self::gc::objectmodel;
use self::utils::Address;
use std::sync::atomic::Ordering;
pub fn start_logging() {
match simple_logger::init_with_level(LogLevel::Trace) {
Ok(_) => {},
Err(_) => {}
}
}
const OBJECT_SIZE : usize = 24;
const OBJECT_ALIGN: usize = 8;
const WORK_LOAD : usize = 10000;
const SPACIOUS_SPACE_SIZE : usize = 500 << 20; // 500mb