Commit f31d004e authored by qinsoon's avatar qinsoon

add an option to disable compiler inlining, removed gc tests from mu

cargo test
parent ea493917
......@@ -453,6 +453,11 @@ impl CompilerPass for Inlining {
}
fn visit_function(&mut self, vm: &VM, func: &mut MuFunctionVersion) {
if !vm.vm_options.flag_allow_inline {
info!("inlining is disabled");
return;
}
if self.check(vm, func) {
self.inline(vm, func);
......
......@@ -14,7 +14,10 @@ Usage:
VM:
--log-level=<level> logging level: none, error, warn, info, debug, trace [default: trace]
AOT Compilation:
Compiler:
--allow-inline=<flag> allow compiler inlining functions on its decision [default: true]
AOT Compiler:
--aot-emit-dir=<dir> the emit directory for ahead-of-time compiling [default: emit]
Garbage Collection:
......@@ -26,6 +29,7 @@ Garbage Collection:
#[derive(Debug, RustcDecodable, RustcEncodable)]
pub struct VMOptions {
pub flag_log_level: MuLogLevel,
pub flag_allow_inline: bool,
pub flag_aot_emit_dir: String,
pub flag_gc_immixspace_size: usize,
pub flag_gc_lospace_size: usize,
......
mod test_gc_harness;
mod test_gcbench;
\ No newline at end of file
use mu::runtime::mm;
use mu::runtime::mm::heap;
use mu::runtime::mm::objectmodel;
use mu::utils::Address;
use std::sync::atomic::Ordering;
// Parameters shared by the allocation tests below.
const OBJECT_SIZE : usize = 24;            // size in bytes of each small test object
const OBJECT_ALIGN: usize = 8;             // alignment requested for every allocation
const WORK_LOAD : usize = 10000;           // number of objects each test allocates
const IMMIX_SPACE_SIZE : usize = 500 << 20; // immix (small-object) space: 500 MiB
const LO_SPACE_SIZE : usize = 500 << 20;    // large-object space: 500 MiB
#[test]
fn test_exhaust_alloc() {
    // Allocate WORK_LOAD small fixed-size objects back to back,
    // taking a yieldpoint before each allocation.
    mm::gc_init(IMMIX_SPACE_SIZE, LO_SPACE_SIZE, 8);
    let mut alloc_thread = mm::new_mutator();

    println!("Trying to allocate {} objects of (size {}, align {}). ", WORK_LOAD, OBJECT_SIZE, OBJECT_ALIGN);
    const ACTUAL_OBJECT_SIZE : usize = OBJECT_SIZE;
    println!("Considering header size of {}, an object should be {}. ", 0, ACTUAL_OBJECT_SIZE);
    println!("This would take {} bytes of {} bytes heap", WORK_LOAD * ACTUAL_OBJECT_SIZE, heap::IMMIX_SPACE_SIZE.load(Ordering::SeqCst));

    let mut remaining = WORK_LOAD;
    while remaining > 0 {
        alloc_thread.yieldpoint();
        let cell = alloc_thread.alloc(OBJECT_SIZE, OBJECT_ALIGN);
        alloc_thread.init_object(cell, 0b1100_0011);
        remaining -= 1;
    }
    alloc_thread.destroy();
}
const LARGE_OBJECT_SIZE : usize = 256;
#[test]
#[allow(unused_variables)]
fn test_exhaust_alloc_large() {
    // Same exhaustion pattern as test_exhaust_alloc, but every object
    // goes through the large-object entry point instead of the bump allocator.
    mm::gc_init(IMMIX_SPACE_SIZE, LO_SPACE_SIZE, 8);
    let mut m = mm::new_mutator();
    (0..WORK_LOAD).for_each(|_| {
        m.yieldpoint();
        let obj = mm::muentry_alloc_large(&mut m, LARGE_OBJECT_SIZE, OBJECT_ALIGN);
    });
    m.destroy();
}
#[test]
fn test_alloc_mark() {
    // Allocate WORK_LOAD objects, then manually flip their trace bits and
    // the corresponding immix line-mark entries, as a collector would.
    mm::gc_init(IMMIX_SPACE_SIZE, LO_SPACE_SIZE, 8);
    let mut m = mm::new_mutator();
    println!("Trying to allocate 1 object of (size {}, align {}). ", OBJECT_SIZE, OBJECT_ALIGN);
    const ACTUAL_OBJECT_SIZE : usize = OBJECT_SIZE;
    println!("Considering header size of {}, an object should be {}. ", 0, ACTUAL_OBJECT_SIZE);
    println!("Trying to allocate {} objects, which will take roughly {} bytes", WORK_LOAD, WORK_LOAD * ACTUAL_OBJECT_SIZE);

    // Keep a reference to every allocation so we can mark them afterwards.
    let mut objs = vec![];
    for _ in 0..WORK_LOAD {
        let cell = m.alloc(ACTUAL_OBJECT_SIZE, OBJECT_ALIGN);
        m.init_object(cell, 0b1100_0011);
        objs.push(unsafe { cell.to_object_reference() });
    }

    let (shared_space, _) = mm::get_spaces();
    println!("Start marking");
    let mark_state = objectmodel::MARK_STATE.load(Ordering::SeqCst) as u8;
    let line_mark_table = shared_space.line_mark_table();
    let (space_start, space_end) = (shared_space.start(), shared_space.end());
    let trace_map = shared_space.trace_map.ptr;

    for entry in &objs {
        let obj = *entry;
        // flag the object itself as traced
        objectmodel::mark_as_traced(trace_map, space_start, obj, mark_state);
        // line-mark metadata only applies to objects inside the immix space
        if obj.to_address() >= space_start && obj.to_address() < space_end {
            line_mark_table.mark_line_live2(space_start, obj.to_address());
        }
    }
    m.destroy();
}
// Model of the small test object: a header word, a reference to the next
// node, and filler fields. Marked dead_code because the tests allocate raw
// memory of OBJECT_SIZE rather than constructing this type directly.
// NOTE(review): there is no #[repr(C)], so field order/offsets are
// unspecified by the compiler — confirm before relying on this layout;
// test_alloc_trace stores the next-reference at offset 0 of the raw object.
#[allow(dead_code)]
struct Node<'a> {
    hdr : u64,
    next : &'a Node<'a>,
    unused_ptr : usize,
    unused_int : i32,
    unused_int2: i32
}
#[test]
fn test_alloc_trace() {
    // Build a WORK_LOAD-long singly linked chain of objects, then run the
    // tracer starting from the head of the chain.
    mm::gc_init(IMMIX_SPACE_SIZE, LO_SPACE_SIZE, 8);
    let mut m = mm::new_mutator();
    let (shared_space, lo_space) = mm::get_spaces();
    println!("Trying to allocate 1 object of (size {}, align {}). ", OBJECT_SIZE, OBJECT_ALIGN);
    const ACTUAL_OBJECT_SIZE : usize = OBJECT_SIZE;
    println!("Considering header size of {}, an object should be {}. ", 0, ACTUAL_OBJECT_SIZE);
    println!("Trying to allocate {} objects, which will take roughly {} bytes", WORK_LOAD, WORK_LOAD * ACTUAL_OBJECT_SIZE);

    // head of the chain, used below as the sole trace root
    let head = m.alloc(ACTUAL_OBJECT_SIZE, OBJECT_ALIGN);
    m.init_object(head, 0b1100_0001);

    let mut tail = head;
    for _ in 1..WORK_LOAD {
        let node = m.alloc(ACTUAL_OBJECT_SIZE, OBJECT_ALIGN);
        m.init_object(node, 0b1100_0001);
        // link the previous object's first field (offset 0) to the new one
        unsafe { tail.store::<Address>(node) };
        tail = node;
    }

    println!("Start tracing");
    let mut roots = vec![unsafe { head.to_object_reference() }];
    heap::gc::start_trace(&mut roots, shared_space, lo_space);
    m.destroy();
}
\ No newline at end of file
#![allow(non_upper_case_globals)]
#![allow(non_camel_case_types)]
#![allow(non_snake_case)]
#![allow(unused_variables)]
#![allow(dead_code)]
#![allow(unused_imports)]
use mu::runtime::mm;
use mu::runtime::mm::heap;
use mu::runtime::mm::heap::immix::ImmixMutatorLocal;
use mu::runtime::mm::heap::immix::ImmixSpace;
use mu::runtime::mm::heap::freelist;
use mu::runtime::mm::heap::freelist::FreeListSpace;
use std::mem::size_of;
use std::sync::atomic::Ordering;
extern crate time;
// Heap sizing and tree-depth parameters; the k-prefixed names mirror the
// constants of the classic GCBench benchmark this file ports.
const IMMIX_SPACE_SIZE : usize = 40 << 20;  // immix (small-object) space: 40 MiB
const LO_SPACE_SIZE    : usize = 40 << 20;  // large-object space: 40 MiB
const kStretchTreeDepth   : i32 = 18;       // depth of the throwaway tree used to stretch the heap
const kLongLivedTreeDepth : i32 = 16;       // depth of the tree kept alive for the whole run
const kArraySize  : i32 = 500000;           // element count of the long-lived double array
const kMinTreeDepth : i32 = 4;              // first depth measured by TimeConstruction
const kMaxTreeDepth : i32 = 16;             // last depth measured (stepping by 2)
// Binary tree node for GCBench. Children are raw pointers into GC-managed
// memory; `i` and `j` are payload fields that nothing in this file reads
// or writes (they pad the node to the size the benchmark allocates).
struct Node {
    left : *mut Node,
    right : *mut Node,
    i : i32,
    j : i32
}
// Long-lived flat array of doubles from GCBench. Only `size_of::<Array>()`
// is used in this file — the actual allocation in `start` is commented out.
struct Array {
    value : [f64; kArraySize as usize]
}
// Attach the two children to a freshly allocated node.
// The i/j payload fields are not initialized here (nothing reads them).
fn init_Node(me: *mut Node, l: *mut Node, r: *mut Node) {
    unsafe { (*me).left = l };
    unsafe { (*me).right = r };
}
// Number of nodes in a complete binary tree of depth `i`: 2^(i+1) - 1.
fn TreeSize(i: i32) -> i32 {
    let full_level_count = 1 << (i + 1);
    full_level_count - 1
}
// How many trees of depth `i` to build so each depth does comparable work:
// proportional to the stretch-tree size, inversely to the per-tree size.
fn NumIters(i: i32) -> i32 {
    let stretch_nodes = TreeSize(kStretchTreeDepth);
    2 * stretch_nodes / TreeSize(i)
}
// Grow a tree top-down: allocate both children of `thisNode`, then recurse
// into each until the requested depth is exhausted.
fn Populate(iDepth: i32, thisNode: *mut Node, mutator: &mut ImmixMutatorLocal) {
    if iDepth <= 0 {
        return;
    }
    unsafe {
        (*thisNode).left = alloc(mutator);
        (*thisNode).right = alloc(mutator);
        Populate(iDepth - 1, (*thisNode).left, mutator);
        Populate(iDepth - 1, (*thisNode).right, mutator);
    }
}
// Grow a tree bottom-up: build both subtrees first, then allocate the
// parent and link it to them. A non-positive depth yields a single node.
fn MakeTree(iDepth: i32, mutator: &mut ImmixMutatorLocal) -> *mut Node {
    if iDepth > 0 {
        // keep the original allocation order: left subtree, right subtree, parent
        let l = MakeTree(iDepth - 1, mutator);
        let r = MakeTree(iDepth - 1, mutator);
        let parent = alloc(mutator);
        init_Node(parent, l, r);
        parent
    } else {
        alloc(mutator)
    }
}
// Intentionally empty placeholder — presumably kept so the call sites match
// the classic GCBench, which prints heap statistics here; this port
// reports nothing.
fn PrintDiagnostics() {
}
// Builds `NumIters(depth)` trees of the given depth twice — once top-down
// (allocate the root, then Populate it) and once bottom-up (MakeTree) —
// and reports the wall-clock time of each phase in milliseconds.
// Every tree becomes garbage as soon as its loop iteration ends.
fn TimeConstruction(depth: i32, mutator: &mut ImmixMutatorLocal) {
    let iNumIters = NumIters(depth);
    println!("creating {} trees of depth {}", iNumIters, depth);

    let tStart = time::now_utc();
    for _ in 0..iNumIters {
        let tempTree = alloc(mutator);
        Populate(depth, tempTree, mutator);
        // tempTree is dropped here; the collector is expected to reclaim it
    }
    let tFinish = time::now_utc();
    println!("\tTop down construction took {} msec", (tFinish - tStart).num_milliseconds());

    let tStart = time::now_utc();
    for _ in 0..iNumIters {
        let tempTree = MakeTree(depth, mutator);
    }
    let tFinish = time::now_utc();
    // fixed typo in the report line: "Buttom" -> "Bottom"
    println!("\tBottom up construction took {} msec", (tFinish - tStart).num_milliseconds());
}
// Bump-allocate one Node-sized, 8-byte-aligned cell and stamp its object
// header, returning the cell as a raw Node pointer.
#[inline(always)]
fn alloc(mutator: &mut ImmixMutatorLocal) -> *mut Node {
    let cell = mutator.alloc(size_of::<Node>(), 8);
    mutator.init_object(cell, 0b1100_0011);
    cell.to_ptr_mut::<Node>()
}
/// GCBench driver: stretch the heap with a large throwaway tree, create a
/// long-lived tree, then time tree construction at increasing depths and
/// report elapsed time and collection count.
#[test]
fn start() {
    unsafe {heap::gc::set_low_water_mark();}
    mm::gc_init(IMMIX_SPACE_SIZE, LO_SPACE_SIZE, 8);
    mm::gc_stats();
    let mut mutator = mm::new_mutator();

    println!("Garbage Collector Test");
    println!(" Live storage will peak at {} bytes.\n",
             2 * (size_of::<Node>() as i32) * TreeSize(kLongLivedTreeDepth) +
             (size_of::<Array>() as i32));
    // fixed typo in the report line: "tree or depth" -> "tree of depth"
    println!(" Stretching memory with a binary tree of depth {}", kStretchTreeDepth);
    PrintDiagnostics();
    let tStart = time::now_utc();

    // Stretch the memory space quickly; the tree is garbage immediately.
    let tempTree = MakeTree(kStretchTreeDepth, &mut mutator);

    // Create a long-lived tree that must survive all later collections.
    println!(" Creating a long-lived binary tree of depth {}", kLongLivedTreeDepth);
    let longLivedTree = alloc(&mut mutator);
    Populate(kLongLivedTreeDepth, longLivedTree, &mut mutator);

    println!(" Creating a long-lived array of {} doubles", kArraySize);
    // NOTE(review): the long-lived array allocation is disabled, so the
    // "Live storage" figure above overstates the actual live set.
    // mm::alloc_large(&mut mutator, size_of::<Array>(), 8);
    PrintDiagnostics();

    let mut d = kMinTreeDepth;
    while d <= kMaxTreeDepth {
        TimeConstruction(d, &mut mutator);
        d += 2;
    }

    // NOTE(review): matches GCBench, which only prints on failure — this
    // never fails the #[test]; consider panicking here instead.
    if longLivedTree.is_null() {
        println!("Failed(long lived tree wrong)");
    }
    // if array.array[1000] != 1.0f64 / (1000 as f64) {
    //     println!("Failed(array element wrong)");
    // }

    let tFinish = time::now_utc();
    let tElapsed = (tFinish - tStart).num_milliseconds();
    PrintDiagnostics();
    println!("Completed in {} msec", tElapsed);
    println!("Finished with {} collections", heap::gc::GC_COUNT.load(Ordering::SeqCst));
    mutator.destroy();
}
\ No newline at end of file
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment