Commit 953861eb authored by qinsoon

add a few more tests for gc/alloc

parent bcb66155
@@ -4,6 +4,10 @@ version = "0.0.1"
authors = ["qinsoon <qinsoon@gmail.com>"]
build = "build.rs"
[features]
default = ["parallel-gc"]
parallel-gc = []
[lib]
crate-type = ["rlib"]
......
use std::mem;
use utils::POINTER_SIZE;
use utils::LOG_POINTER_SIZE;
use utils::Address;
use heap::gc::malloc_zero;
@@ -20,6 +21,16 @@ impl <T> AddressMap<T> where T: Copy{
AddressMap{start: start, end: end, ptr: ptr, len: len}
}
pub fn init_all (&self, init: T) {
println!("check valid");
let mut cursor = self.start;
while cursor < self.end {
self.set(cursor, init);
cursor = cursor.plus(POINTER_SIZE);
}
}
#[inline(always)]
pub fn set(&self, addr: Address, value: T) {
let index = (addr.diff(self.start) >> LOG_POINTER_SIZE) as isize;
......
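The map keeps one cell per word of the space it covers: set turns an address into a slot index by shifting the byte offset from the map's start right by LOG_POINTER_SIZE. A minimal standalone sketch of that index math, assuming a 64-bit target where POINTER_SIZE is 8 and LOG_POINTER_SIZE is 3 (the real constants live in utils):

// Hypothetical sketch of AddressMap's address-to-slot computation; not the crate's code.
const POINTER_SIZE: usize = 8; // assumption: 64-bit words
const LOG_POINTER_SIZE: usize = 3; // log2(POINTER_SIZE)

fn slot_index(map_start: usize, addr: usize) -> isize {
    // one cell per word: byte offset divided by the word size
    ((addr - map_start) >> LOG_POINTER_SIZE) as isize
}

fn main() {
    let start = 0x10000;
    assert_eq!(slot_index(start, start), 0);
    assert_eq!(slot_index(start, start + POINTER_SIZE), 1);
    assert_eq!(slot_index(start, start + 4 * POINTER_SIZE), 4);
}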
@@ -8,19 +8,19 @@ use objectmodel;
use common::AddressMap;
use utils::{Address, ObjectReference};
use utils::POINTER_SIZE;
use utils::{LOG_POINTER_SIZE, POINTER_SIZE};
use utils::bit_utils;
use std::sync::atomic::{AtomicIsize, Ordering};
use std::sync::{Arc, Mutex, Condvar, RwLock};
#[cfg(feature = "mt-trace")]
#[cfg(feature = "parallel-gc")]
use crossbeam::sync::chase_lev::*;
#[cfg(feature = "mt-trace")]
#[cfg(feature = "parallel-gc")]
use std::sync::mpsc;
#[cfg(feature = "mt-trace")]
#[cfg(feature = "parallel-gc")]
use std::sync::mpsc::channel;
#[cfg(feature = "mt-trace")]
#[cfg(feature = "parallel-gc")]
use std::thread;
use std::sync::atomic;
@@ -83,12 +83,14 @@ extern "C" {
}
#[inline(always)]
pub fn is_valid_object(addr: Address, start: Address, end: Address, live_map: &AddressMap<u8>) -> bool {
pub fn is_valid_object(addr: Address, start: Address, end: Address, live_map: *mut u8) -> bool {
if addr >= end || addr < start {
return false;
}
bit_utils::test_nth_bit(live_map.get(addr), objectmodel::OBJ_START_BIT)
let index = (addr.diff(start) >> LOG_POINTER_SIZE) as isize;
bit_utils::test_nth_bit(unsafe {*live_map.offset(index)}, objectmodel::OBJ_START_BIT)
}
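Taking the raw *mut u8 here instead of &AddressMap<u8> lines up with the call sites below: the parallel tracing code already passes alloc_map and trace_map around as raw pointers, so the new debug-mode checks in steal_trace_object and steal_process_edge can reuse this function directly.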
pub fn stack_scan() -> Vec<ObjectReference> {
@@ -104,7 +106,7 @@ pub fn stack_scan() -> Vec<ObjectReference> {
while cursor < low_water_mark {
let value : Address = unsafe {cursor.load::<Address>()};
if is_valid_object(value, immix_space.start(), immix_space.end(), &immix_space.alloc_map) {
if is_valid_object(value, immix_space.start(), immix_space.end(), immix_space.alloc_map.ptr) {
ret.push(unsafe {value.to_object_reference()});
}
@@ -119,7 +121,7 @@ pub fn stack_scan() -> Vec<ObjectReference> {
for i in 0..registers_count {
let value = unsafe {*registers.offset(i as isize)};
if is_valid_object(value, immix_space.start(), immix_space.end(), &immix_space.alloc_map) {
if is_valid_object(value, immix_space.start(), immix_space.end(), immix_space.alloc_map.ptr) {
ret.push(unsafe {value.to_object_reference()});
}
}
@@ -251,7 +253,7 @@ pub static GC_THREADS : atomic::AtomicUsize = atomic::ATOMIC_USIZE_INIT;
#[allow(unused_variables)]
#[inline(never)]
#[cfg(feature = "mt-trace")]
#[cfg(feature = "parallel-gc")]
pub fn start_trace(work_stack: &mut Vec<ObjectReference>, immix_space: Arc<ImmixSpace>, lo_space: Arc<RwLock<FreeListSpace>>) {
// creates root deque
let (mut worker, stealer) = deque();
@@ -295,7 +297,7 @@ pub fn start_trace(work_stack: &mut Vec<ObjectReference>, immix_space: Arc<Immix
#[allow(unused_variables)]
#[inline(never)]
#[cfg(not(feature = "mt-trace"))]
#[cfg(not(feature = "parallel-gc"))]
pub fn start_trace(local_queue: &mut Vec<ObjectReference>, immix_space: Arc<ImmixSpace>, lo_space: Arc<RwLock<FreeListSpace>>) {
let mark_state = objectmodel::MARK_STATE.load(Ordering::SeqCst) as u8;
@@ -305,7 +307,7 @@ pub fn start_trace(local_queue: &mut Vec<ObjectReference>, immix_space: Arc<Immi
}
#[allow(unused_variables)]
#[cfg(feature = "mt-trace")]
#[cfg(feature = "parallel-gc")]
fn start_steal_trace(stealer: Stealer<ObjectReference>, job_sender:mpsc::Sender<ObjectReference>, immix_space: Arc<ImmixSpace>, lo_space: Arc<RwLock<FreeListSpace>>) {
use objectmodel;
@@ -335,9 +337,20 @@ fn start_steal_trace(stealer: Stealer<ObjectReference>, job_sender:mpsc::Sender<
}
#[inline(always)]
#[cfg(feature = "mt-trace")]
#[cfg(feature = "parallel-gc")]
pub fn steal_trace_object(obj: ObjectReference, local_queue: &mut Vec<ObjectReference>, job_sender: &mpsc::Sender<ObjectReference>, alloc_map: *mut u8, trace_map: *mut u8, line_mark_table: &ImmixLineMarkTable, immix_start: Address, immix_end: Address, mark_state: u8, lo_space: &Arc<RwLock<FreeListSpace>>) {
use objectmodel;
if cfg!(debug_assertions) {
// check that this reference points at a valid object within the heap
if !is_valid_object(obj.to_address(), immix_start, immix_end, alloc_map) {
println!("trying to trace an object that is not valid");
println!("address: 0x{:x}", obj);
println!("---");
println!("immix space: 0x{:x} - 0x{:x}", immix_start, immix_end);
println!("lo space: {}", *lo_space.read().unwrap());
panic!("invalid object during tracing");
}
}
objectmodel::mark_as_traced(trace_map, immix_start, obj, mark_state);
@@ -352,20 +365,20 @@ pub fn steal_trace_object(obj: ObjectReference, local_queue: &mut Vec<ObjectRefe
let mut base = addr;
loop {
let value = objectmodel::get_ref_byte(alloc_map, immix_start, obj);
let (ref_bits, short_encode) = (common::lower_bits(value, objectmodel::REF_BITS_LEN), common::test_nth_bit(value, objectmodel::SHORT_ENCODE_BIT));
let (ref_bits, short_encode) = (bit_utils::lower_bits(value, objectmodel::REF_BITS_LEN), bit_utils::test_nth_bit(value, objectmodel::SHORT_ENCODE_BIT));
match ref_bits {
0b0000_0001 => {
steal_process_edge(base, local_queue, trace_map, immix_start, job_sender, mark_state);
steal_process_edge(base, 0, local_queue, alloc_map, trace_map, immix_start, immix_end, job_sender, mark_state);
},
0b0000_0011 => {
steal_process_edge(base, local_queue, trace_map, immix_start, job_sender, mark_state);
steal_process_edge(base.plus(8), local_queue, trace_map, immix_start, job_sender, mark_state);
steal_process_edge(base, 0, local_queue, alloc_map, trace_map, immix_start, immix_end, job_sender, mark_state);
steal_process_edge(base, 8, local_queue, alloc_map, trace_map, immix_start, immix_end, job_sender, mark_state);
},
0b0000_1111 => {
steal_process_edge(base, local_queue, trace_map, immix_start, job_sender, mark_state);
steal_process_edge(base.plus(8), local_queue, trace_map, immix_start, job_sender, mark_state);
steal_process_edge(base.plus(16), local_queue, trace_map, immix_start, job_sender, mark_state);
steal_process_edge(base.plus(24), local_queue, trace_map, immix_start, job_sender, mark_state);
steal_process_edge(base, 0, local_queue, alloc_map, trace_map, immix_start, immix_end, job_sender, mark_state);
steal_process_edge(base, 8, local_queue, alloc_map, trace_map, immix_start, immix_end, job_sender, mark_state);
steal_process_edge(base, 16, local_queue, alloc_map, trace_map, immix_start, immix_end, job_sender, mark_state);
steal_process_edge(base, 24, local_queue, alloc_map, trace_map, immix_start, immix_end, job_sender, mark_state);
},
_ => {
panic!("unexpcted ref_bits patterns: {:b}", ref_bits);
@@ -376,23 +389,37 @@ pub fn steal_trace_object(obj: ObjectReference, local_queue: &mut Vec<ObjectRefe
if short_encode {
return;
} else {
base = base.plus(objectmodel::REF_BITS_LEN * 8);
base = base.plus(objectmodel::REF_BITS_LEN * POINTER_SIZE);
}
}
}
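Each byte returned by get_ref_byte describes a group of REF_BITS_LEN consecutive words: bit i set means word i (byte offset i * POINTER_SIZE) holds a reference, and the short-encode bit says this byte is the last one for the object. A hedged sketch of that decoding, taking REF_BITS_LEN = 6 and SHORT_ENCODE_BIT = 6 as assumptions read off the match arms above:

// Hypothetical decoder for the per-object ref-bits byte; the constants are
// assumptions inferred from the patterns above, not verified crate values.
const POINTER_SIZE: usize = 8;
const REF_BITS_LEN: usize = 6;
const SHORT_ENCODE_BIT: usize = 6;

fn decode(hdr: u8) -> (Vec<usize>, bool) {
    let ref_bits = hdr & ((1u8 << REF_BITS_LEN) - 1);
    let short_encode = hdr & (1u8 << SHORT_ENCODE_BIT) != 0;
    // every set bit i marks word i as a reference field at byte offset i * POINTER_SIZE
    let offsets = (0..REF_BITS_LEN)
        .filter(|&i| ref_bits & (1u8 << i) != 0)
        .map(|i| i * POINTER_SIZE)
        .collect();
    (offsets, short_encode)
}

fn main() {
    // 0b0100_0011: references at offsets 0 and 8, short-encode set, so scanning stops
    assert_eq!(decode(0b0100_0011), (vec![0, 8], true));
}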
#[inline(always)]
#[cfg(feature = "mt-trace")]
pub fn steal_process_edge(addr: Address, local_queue:&mut Vec<ObjectReference>, trace_map: *mut u8, immix_start: Address, job_sender: &mpsc::Sender<ObjectReference>, mark_state: u8) {
use objectmodel;
let obj_addr = unsafe{addr.load::<ObjectReference>()};
#[cfg(feature = "parallel-gc")]
pub fn steal_process_edge(base: Address, offset: usize, local_queue:&mut Vec<ObjectReference>, alloc_map: *mut u8, trace_map: *mut u8, immix_start: Address, immix_end: Address, job_sender: &mpsc::Sender<ObjectReference>, mark_state: u8) {
let field_addr = base.plus(offset);
let edge = unsafe{field_addr.load::<ObjectReference>()};
if cfg!(debug_assertions) {
// check that the edge, when non-null, points at a valid object within the heap
if !edge.to_address().is_zero() && !is_valid_object(edge.to_address(), immix_start, immix_end, alloc_map) {
println!("trying to follow an edge that is not a valid object");
println!("edge address: 0x{:x}", edge);
println!("base address: 0x{:x}", base);
println!("---");
objectmodel::print_object(base, immix_start, trace_map, alloc_map);
println!("---");
println!("immix space: 0x{:x} - 0x{:x}", immix_start, immix_end);
panic!("invalid object during tracing");
}
}
if !obj_addr.to_address().is_zero() && !objectmodel::is_traced(trace_map, immix_start, obj_addr, mark_state) {
if !edge.to_address().is_zero() && !objectmodel::is_traced(trace_map, immix_start, edge, mark_state) {
if local_queue.len() >= PUSH_BACK_THRESHOLD {
job_sender.send(obj_addr).unwrap();
job_sender.send(edge).unwrap();
} else {
local_queue.push(obj_addr);
local_queue.push(edge);
}
}
}
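The threshold split is a load-balancing choice: edges stay in the worker's local vector while it is short, and only the overflow goes through the mpsc channel, from where it can be redistributed to other tracing threads, so the common case involves no synchronization. A rough sketch of the policy's shape (PUSH_BACK_THRESHOLD's actual value is defined elsewhere in this module; 128 here is a placeholder):

// Sketch: keep work local until a threshold, then overflow into a shared channel.
use std::sync::mpsc;

const PUSH_BACK_THRESHOLD: usize = 128; // placeholder, not the crate's value

fn push_work(local: &mut Vec<u64>, sender: &mpsc::Sender<u64>, item: u64) {
    if local.len() >= PUSH_BACK_THRESHOLD {
        sender.send(item).unwrap(); // overflow: other workers can take it
    } else {
        local.push(item); // common case: thread-local, no synchronization
    }
}

fn main() {
    let (tx, rx) = mpsc::channel();
    let mut local = Vec::new();
    for i in 0..200 {
        push_work(&mut local, &tx, i);
    }
    assert_eq!(local.len(), PUSH_BACK_THRESHOLD);
    assert_eq!(rx.try_iter().count(), 200 - PUSH_BACK_THRESHOLD);
}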
......
@@ -226,6 +226,7 @@ impl ImmixMutatorLocal {
fn return_block(&mut self) {
if self.block.is_some() {
trace!("finishing block {:?}", self.block.as_ref().unwrap());
self.space.return_used_block(self.block.take().unwrap());
}
}
......
@@ -150,14 +150,24 @@ impl ImmixSpace {
let line_mark_table = LineMarkTable::new(start, end);
let trace_map = AddressMap::new(start, end);
if cfg!(debug_assertions) {
// access every one of its cells
trace_map.init_all(0);
}
let alloc_map = AddressMap::new(start, end);
if cfg!(debug_assertions) {
alloc_map.init_all(0);
}
let mut ret = ImmixSpace {
start: start,
end: end,
mmap: anon_mmap,
line_mark_table: line_mark_table,
trace_map: Arc::new(AddressMap::new(start, end)),
alloc_map: Arc::new(AddressMap::new(start, end)),
trace_map: Arc::new(trace_map),
alloc_map: Arc::new(alloc_map),
usable_blocks: Mutex::new(LinkedList::new()),
used_blocks: Mutex::new(LinkedList::new()),
total_blocks: 0
@@ -369,6 +379,12 @@ impl fmt::Display for ImmixSpace {
}
impl fmt::Display for ImmixBlock {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "ImmixBlock#{}(state={:?}, address=0x{:X})", self.id, self.state, self.start)
}
}
impl fmt::Debug for ImmixBlock {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "ImmixBlock#{}(state={:?}, address={:#X}, line_table={:?}", self.id, self.state, self.start, self.line_mark_table.ptr).unwrap();
......
@@ -116,6 +116,7 @@ pub extern fn alloc(mutator: &mut Box<ImmixMutatorLocal>, size: usize, align: us
}
#[no_mangle]
#[inline(never)]
pub extern fn alloc_slow(mutator: &mut Box<ImmixMutatorLocal>, size: usize, align: usize) -> ObjectReference {
let ret = mutator.try_alloc_from_local(size, align);
unsafe {ret.to_object_reference()}
......
@@ -2,7 +2,8 @@ use std::sync::atomic;
pub static MARK_STATE : atomic::AtomicUsize = atomic::ATOMIC_USIZE_INIT;
use utils::{Address, ObjectReference};
use utils::LOG_POINTER_SIZE;
use utils::{LOG_POINTER_SIZE, POINTER_SIZE};
use utils::bit_utils;
pub fn init() {
MARK_STATE.store(1, atomic::Ordering::SeqCst);
@@ -17,7 +18,61 @@ pub fn flip_mark_state() {
}
}
pub fn print_object(obj: Address, space_start: Address, trace_map: *mut u8, alloc_map: *mut u8) {
let mut cursor = obj;
println!("OBJECT 0x{:x}", obj);
loop {
let hdr = get_ref_byte(alloc_map, space_start, unsafe {cursor.to_object_reference()});
let (ref_bits, short_encode) = (
bit_utils::lower_bits(hdr, REF_BITS_LEN),
bit_utils::test_nth_bit(hdr, SHORT_ENCODE_BIT)
);
println!("0x{:x} | val: 0x{:x} | {}, hdr: {:b}",
cursor, unsafe{cursor.load::<u64>()}, interpret_hdr_for_print_object(hdr, 0), hdr);
cursor = cursor.plus(POINTER_SIZE);
println!("0x{:x} | val: 0x{:x} | {}",
cursor, unsafe{cursor.load::<u64>()}, interpret_hdr_for_print_object(hdr, 1));
cursor = cursor.plus(POINTER_SIZE);
println!("0x{:x} | val: 0x{:x} | {}",
cursor, unsafe{cursor.load::<u64>()}, interpret_hdr_for_print_object(hdr, 2));
cursor = cursor.plus(POINTER_SIZE);
println!("0x{:x} | val: 0x{:x} | {}",
cursor, unsafe{cursor.load::<u64>()}, interpret_hdr_for_print_object(hdr, 3));
cursor = cursor.plus(POINTER_SIZE);
println!("0x{:x} | val: 0x{:x} | {}",
cursor, unsafe{cursor.load::<u64>()}, interpret_hdr_for_print_object(hdr, 4));
cursor = cursor.plus(POINTER_SIZE);
println!("0x{:x} | val: 0x{:x} | {}, {}",
cursor, unsafe{cursor.load::<u64>()}, interpret_hdr_for_print_object(hdr, 5),
{
if !short_encode {
"MORE"
} else {
""
}
});
// advance past word 5 as well, so the next header byte is read at the start of the next group
cursor = cursor.plus(POINTER_SIZE);
if short_encode {
return;
}
}
}
// index between 0 and 5
fn interpret_hdr_for_print_object(hdr: u8, index: usize) -> &'static str {
if bit_utils::test_nth_bit(hdr, index) {
"REF "
} else {
"NON-REF"
}
}
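As a worked example, hdr = 0b1100_0011 (the pattern the tests below pass to init_object) has bits 0 and 1 set, so print_object reports REF for words 0 and 1 and NON-REF for words 2 through 5; assuming SHORT_ENCODE_BIT refers to bit 6, the short-encode bit is also set, so a single header byte covers the object and no MORE marker is printed.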
#[inline(always)]
pub fn mark_as_traced(trace_map: *mut u8, space_start: Address, obj: ObjectReference, mark_state: u8) {
......
mod test_thread;
mod test_alloc;
mod test_gc;
// disable this for now - it doesn't run
//mod test_gcbench;
\ No newline at end of file
mod test_gcbench;
\ No newline at end of file
use mu::runtime::mm;
use mu::runtime::mm::heap;
use mu::runtime::mm::heap::immix::ImmixMutatorLocal;
use mu::runtime::mm::heap::immix::ImmixSpace;
use mu::runtime::mm::heap::freelist::FreeListSpace;
use mu::runtime::mm::objectmodel;
use mu::utils::Address;
use std::sync::RwLock;
use std::sync::Arc;
use std::sync::atomic::Ordering;
const OBJECT_SIZE : usize = 24;
const OBJECT_ALIGN: usize = 8;
const WORK_LOAD : usize = 500000;
const IMMIX_SPACE_SIZE : usize = 500 << 20;
const LO_SPACE_SIZE : usize = 500 << 20;
#[test]
fn test_exhaust_alloc() {
mm::gc_init(IMMIX_SPACE_SIZE, LO_SPACE_SIZE, 8);
let mut mutator = mm::new_mutator();
println!("Trying to allocate {} objects of (size {}, align {}). ", WORK_LOAD, OBJECT_SIZE, OBJECT_ALIGN);
const ACTUAL_OBJECT_SIZE : usize = OBJECT_SIZE;
println!("Considering header size of {}, an object should be {}. ", 0, ACTUAL_OBJECT_SIZE);
println!("This would take {} bytes of {} bytes heap", WORK_LOAD * ACTUAL_OBJECT_SIZE, heap::IMMIX_SPACE_SIZE.load(Ordering::SeqCst));
for _ in 0..WORK_LOAD {
mutator.yieldpoint();
let res = mutator.alloc(OBJECT_SIZE, OBJECT_ALIGN);
mutator.init_object(res, 0b1100_0011);
}
}
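With these constants the loop allocates 500000 * 24 = 12000000 bytes, roughly 12 MB against a 500 MB immix space, so the test exercises the allocation fast path and the yieldpoints without ever running the heap out of memory.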
const LARGE_OBJECT_SIZE : usize = 256;
#[test]
fn test_exhaust_alloc_large() {
mm::gc_init(IMMIX_SPACE_SIZE, LO_SPACE_SIZE, 8);
let mut mutator = mm::new_mutator();
for _ in 0..WORK_LOAD {
mutator.yieldpoint();
let res = mm::alloc_large(&mut mutator, LARGE_OBJECT_SIZE, OBJECT_ALIGN);
}
}
#[test]
fn test_alloc_mark() {
heap::IMMIX_SPACE_SIZE.store(IMMIX_SPACE_SIZE, Ordering::SeqCst);
heap::LO_SPACE_SIZE.store(LO_SPACE_SIZE, Ordering::SeqCst);
let shared_space : Arc<ImmixSpace> = {
let space : ImmixSpace = ImmixSpace::new(heap::IMMIX_SPACE_SIZE.load(Ordering::SeqCst));
Arc::new(space)
};
let lo_space : Arc<RwLock<FreeListSpace>> = {
let space : FreeListSpace = FreeListSpace::new(heap::LO_SPACE_SIZE.load(Ordering::SeqCst));
Arc::new(RwLock::new(space))
};
heap::gc::init(shared_space.clone(), lo_space.clone());
let mut mutator = ImmixMutatorLocal::new(shared_space.clone());
println!("Trying to allocate 1 object of (size {}, align {}). ", OBJECT_SIZE, OBJECT_ALIGN);
const ACTUAL_OBJECT_SIZE : usize = OBJECT_SIZE;
println!("Considering header size of {}, an object should be {}. ", 0, ACTUAL_OBJECT_SIZE);
println!("Trying to allocate {} objects, which will take roughly {} bytes", WORK_LOAD, WORK_LOAD * ACTUAL_OBJECT_SIZE);
let mut objs = vec![];
for _ in 0..WORK_LOAD {
let res = mutator.alloc(ACTUAL_OBJECT_SIZE, OBJECT_ALIGN);
mutator.init_object(res, 0b1100_0011);
objs.push(unsafe {res.to_object_reference()});
}
println!("Start marking");
let mark_state = objectmodel::MARK_STATE.load(Ordering::SeqCst) as u8;
let line_mark_table = shared_space.line_mark_table();
let (space_start, space_end) = (shared_space.start(), shared_space.end());
let trace_map = shared_space.trace_map.ptr;
for i in 0..objs.len() {
let obj = unsafe {*objs.get_unchecked(i)};
// mark the object as traced
objectmodel::mark_as_traced(trace_map, space_start, obj, mark_state);
// mark meta-data
if obj.to_address() >= space_start && obj.to_address() < space_end {
line_mark_table.mark_line_live2(space_start, obj.to_address());
}
}
}
#[allow(dead_code)]
struct Node<'a> {
hdr : u64,
next : &'a Node<'a>,
unused_ptr : usize,
unused_int : i32,
unused_int2: i32
}
#[test]
fn test_alloc_trace() {
heap::IMMIX_SPACE_SIZE.store(IMMIX_SPACE_SIZE, Ordering::SeqCst);
heap::LO_SPACE_SIZE.store(LO_SPACE_SIZE, Ordering::SeqCst);
let shared_space : Arc<ImmixSpace> = {
let space : ImmixSpace = ImmixSpace::new(heap::IMMIX_SPACE_SIZE.load(Ordering::SeqCst));
Arc::new(space)
};
let lo_space : Arc<RwLock<FreeListSpace>> = {
let space : FreeListSpace = FreeListSpace::new(heap::LO_SPACE_SIZE.load(Ordering::SeqCst));
Arc::new(RwLock::new(space))
};
heap::gc::init(shared_space.clone(), lo_space.clone());
let mut mutator = ImmixMutatorLocal::new(shared_space.clone());
println!("Trying to allocate 1 object of (size {}, align {}). ", OBJECT_SIZE, OBJECT_ALIGN);
const ACTUAL_OBJECT_SIZE : usize = OBJECT_SIZE;
println!("Considering header size of {}, an object should be {}. ", 0, ACTUAL_OBJECT_SIZE);
println!("Trying to allocate {} objects, which will take roughly {} bytes", WORK_LOAD, WORK_LOAD * ACTUAL_OBJECT_SIZE);
let root = mutator.alloc(ACTUAL_OBJECT_SIZE, OBJECT_ALIGN);
mutator.init_object(root, 0b1100_0001);
let mut prev = root;
for _ in 0..WORK_LOAD - 1 {
let res = mutator.alloc(ACTUAL_OBJECT_SIZE, OBJECT_ALIGN);
mutator.init_object(res, 0b1100_0001);
// set prev's 1st field (offset 0) to this object
unsafe {prev.store::<Address>(res)};
prev = res;
}
println!("Start tracing");
let mut roots = vec![unsafe {root.to_object_reference()}];
heap::gc::start_trace(&mut roots, shared_space, lo_space);
}
\ No newline at end of file
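Note how the init pattern and the store line up: 0b1100_0001 marks only word 0 of each node as a reference, and prev.store::<Address>(res) writes the next node's address at exactly that offset, so the trace that starts from the single root must follow one edge per object through the whole 500000-node chain.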
use mu::runtime::mm;
use mu::runtime::mm::heap;
use mu::runtime::mm::heap::immix::ImmixMutatorLocal;
use mu::runtime::mm::heap::immix::ImmixSpace;
use mu::runtime::mm::heap::freelist::FreeListSpace;
use mu::runtime::mm::objectmodel;
use mu::utils::Address;
use std::sync::RwLock;
use std::sync::Arc;
use std::sync::atomic::Ordering;
const OBJECT_SIZE : usize = 24;
const OBJECT_ALIGN: usize = 8;
const WORK_LOAD : usize = 500000;
const WORK_LOAD : usize = 50000000;
const IMMIX_SPACE_SIZE : usize = 500 << 20;
const LO_SPACE_SIZE : usize = 500 << 20;
const IMMIX_SPACE_SIZE : usize = 40 << 20;
const LO_SPACE_SIZE : usize = 40 << 20;
#[test]
fn test_exhaust_alloc() {
fn test_gc_no_alive() {
unsafe {heap::gc::set_low_water_mark();}
mm::gc_init(IMMIX_SPACE_SIZE, LO_SPACE_SIZE, 8);
let mut mutator = mm::new_mutator();
@@ -34,108 +28,4 @@ fn test_exhaust_alloc() {
let res = mutator.alloc(OBJECT_SIZE, OBJECT_ALIGN);
mutator.init_object(res, 0b1100_0011);
}
}
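Here the constants are inverted on purpose: 50000000 objects of 24 bytes push about 1.2 GB through a 40 MB immix space, so allocation has to trigger repeated collections, and because the test keeps no references to what it allocates, every collection is free to reclaim the entire heap, hence the name test_gc_no_alive.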
#[test]
fn test_alloc_mark() {
heap::IMMIX_SPACE_SIZE.store(IMMIX_SPACE_SIZE, Ordering::SeqCst);
heap::LO_SPACE_SIZE.store(LO_SPACE_SIZE, Ordering::SeqCst);
let shared_space : Arc<ImmixSpace> = {
let space : ImmixSpace = ImmixSpace::new(heap::IMMIX_SPACE_SIZE.load(Ordering::SeqCst));
Arc::new(space)
};
let lo_space : Arc<RwLock<FreeListSpace>> = {
let space : FreeListSpace = FreeListSpace::new(heap::LO_SPACE_SIZE.load(Ordering::SeqCst));
Arc::new(RwLock::new(space))
};
heap::gc::init(shared_space.clone(), lo_space.clone());
let mut mutator = ImmixMutatorLocal::new(shared_space.clone());
println!("Trying to allocate 1 object of (size {}, align {}). ", OBJECT_SIZE, OBJECT_ALIGN);
const ACTUAL_OBJECT_SIZE : usize = OBJECT_SIZE;
println!("Considering header size of {}, an object should be {}. ", 0, ACTUAL_OBJECT_SIZE);
println!("Trying to allocate {} objects, which will take roughly {} bytes", WORK_LOAD, WORK_LOAD * ACTUAL_OBJECT_SIZE);
let mut objs = vec![];
for _ in 0..WORK_LOAD {
let res = mutator.alloc(ACTUAL_OBJECT_SIZE, OBJECT_ALIGN);
mutator.init_object(res, 0b1100_0011);
objs.push(unsafe {res.to_object_reference()});
}
println!("Start marking");
let mark_state = objectmodel::MARK_STATE.load(Ordering::SeqCst) as u8;
let line_mark_table = shared_space.line_mark_table();
let (space_start, space_end) = (shared_space.start(), shared_space.end());
let trace_map = shared_space.trace_map.ptr;
for i in 0..objs.len() {
let obj = unsafe {*objs.get_unchecked(i)};
// mark the object as traced
objectmodel::mark_as_traced(trace_map, space_start, obj, mark_state);
// mark meta-data
if obj.to_address() >= space_start && obj.to_address() < space_end {
line_mark_table.mark_line_live2(space_start, obj.to_address());
}
}
}
#[allow(dead_code)]
struct Node<'a> {
hdr : u64,
next : &'a Node<'a>,
unused_ptr : usize,
unused_int : i32,
unused_int2: i32
}
#[test]
fn test_alloc_trace() {
heap::IMMIX_SPACE_SIZE.store(IMMIX_SPACE_SIZE, Ordering::SeqCst);
heap::LO_SPACE_SIZE.store(LO_SPACE_SIZE, Ordering::SeqCst);
let shared_space : Arc<ImmixSpace> = {
let space : ImmixSpace = ImmixSpace::new(heap::IMMIX_SPACE_SIZE.load(Ordering::SeqCst));
Arc::new(space)
};
let lo_space : Arc<RwLock<FreeListSpace>> = {
let space : FreeListSpace = FreeListSpace::new(heap::LO_SPACE_SIZE.load(Ordering::SeqCst));
Arc::new(RwLock::new(space))
};
heap::gc::init(shared_space.clone(), lo_space.clone());
let mut mutator = ImmixMutatorLocal::new(shared_space.clone());
println!("Trying to allocate 1 object of (size {}, align {}). ", OBJECT_SIZE, OBJECT_ALIGN);
const ACTUAL_OBJECT_SIZE : usize = OBJECT_SIZE;
println!("Considering header size of {}, an object should be {}. ", 0, ACTUAL_OBJECT_SIZE);
println!("Trying to allocate {} objects, which will take roughly {} bytes", WORK_LOAD, WORK_LOAD * ACTUAL_OBJECT_SIZE);
let root = mutator.alloc(ACTUAL_OBJECT_SIZE, OBJECT_ALIGN);
mutator.init_object(root, 0b1100_0001);
let mut prev = root;
for _ in 0..WORK_LOAD - 1 {
let res = mutator.alloc(ACTUAL_OBJECT_SIZE, OBJECT_ALIGN);
mutator.init_object(res, 0b1100_0001);
// set prev's 1st field (offset 0) to this object
unsafe {prev.store::<Address>(res)};
prev = res;
}
println!("Start tracing");
let mut roots = vec![unsafe {root.to_object_reference()}];
heap::gc::start_trace(&mut roots, shared_space, lo_space);
}
\ No newline at end of file
@@ -142,7 +142,7 @@ fn start() {
Populate(kLongLivedTreeDepth, longLivedTree, &mut mutator);
println!(" Creating a long-lived array of {} doubles", kArraySize);
mm::alloc_large(&mut mutator, size_of::<Array>(), 8);
// mm::alloc_large(&mut mutator, size_of::<Array>(), 8);
PrintDiagnostics();
......