
Commit 0cdbaac9 authored by qinsoon

add linked list test, fixed gcbench

parent 15a77cb1
@@ -304,7 +304,7 @@ impl Treadmill {
impl fmt::Display for Treadmill {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
-       write!(f, "next: {}", self.from_space_next).unwrap();
+       write!(f, "next: {}\n", self.from_space_next).unwrap();
        write!(f, "from:").unwrap();
        for i in 0..self.spaces[self.from].len() {
            write!(f, "{}->", self.spaces[self.from][i]).unwrap();
......
@@ -142,11 +142,11 @@ impl ImmixMutatorLocal {
    #[inline(always)]
    pub fn alloc(&mut self, size: usize, align: usize) -> Address {
+       let size = size + objectmodel::OBJECT_HEADER_SIZE;
        let start = self.cursor.align_up(align);
        let end = start.plus(size);
-       let size = size + objectmodel::OBJECT_HEADER_SIZE;
        if end > self.limit {
            let ret = self.try_alloc_from_local(size, align);
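The reorder above is presumably the gcbench fix: the fast path compares `end` against `self.limit`, so if the object-header bytes are added to `size` only after `end` is computed, the header can land past the block limit. A minimal sketch of that bump-pointer fast path over plain `usize` addresses (everything except the `cursor`/`limit` names is illustrative, not the crate's API):

```rust
// Illustration only: a bump-pointer fast path with the header added up front.
const HEADER_SIZE: usize = 8; // stands in for objectmodel::OBJECT_HEADER_SIZE

struct Bump { cursor: usize, limit: usize }

impl Bump {
    fn alloc(&mut self, size: usize, align: usize) -> Option<usize> {
        let size = size + HEADER_SIZE;                        // before computing `end`
        let start = (self.cursor + align - 1) & !(align - 1); // align_up
        let end = start + size;
        if end > self.limit {
            return None; // fall back to the slow path
        }
        self.cursor = end;
        Some(start)
    }
}
```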
@@ -238,8 +238,9 @@ impl ImmixMutatorLocal {
        match new_block {
            Some(mut b) => {
-               // zero the block
-               b.lazy_zeroing();
+               // zero the block - do not need to zero the block here
+               // we zero lines that get used in try_alloc_from_local()
+               // b.lazy_zeroing();
                self.block = Some(b);
                self.cursor = self.block().start();
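Per the new comment, whole-block `lazy_zeroing()` is dropped in favour of zeroing just the lines that `try_alloc_from_local()` hands out. A rough sketch of line-granularity zeroing; `LINE_SIZE` and `zero_lines` are illustrative assumptions, not names from this crate:

```rust
use std::ptr;

const LINE_SIZE: usize = 256; // assumed immix line size, for illustration only

/// Zero only the `n_lines` contiguous lines that are about to be bump-allocated
/// into, instead of paying to zero the whole block eagerly.
unsafe fn zero_lines(first_line: *mut u8, n_lines: usize) {
    ptr::write_bytes(first_line, 0, n_lines * LINE_SIZE);
}
```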
@@ -264,9 +265,26 @@ impl ImmixMutatorLocal {
    fn return_block(&mut self) {
        if self.block.is_some() {
            trace!("finishing block {:?}", self.block.as_ref().unwrap());
+           if cfg!(debug_assertions) {
+               let block = self.block.as_ref().unwrap();
+               ImmixMutatorLocal::sanity_check_finished_block(block);
+           }
            self.space.return_used_block(self.block.take().unwrap());
        }
    }

+   #[cfg(feature = "use-sidemap")]
+   fn sanity_check_finished_block(block: &ImmixBlock) {
+   }
+
+   #[cfg(not(feature = "use-sidemap"))]
+   fn sanity_check_finished_block(block: &ImmixBlock) {
+   }

    fn block(&mut self) -> &mut ImmixBlock {
        self.block.as_mut().unwrap()
    }
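The two `sanity_check_finished_block` variants appear empty (or collapsed) in this view. The kind of invariant a debug-only check like this typically asserts is that the bump cursor never escaped the block being returned; a hedged sketch with an illustrative `BlockInfo` type rather than the real `ImmixBlock`:

```rust
// Illustrative only: `BlockInfo` and its fields are stand-ins, not ImmixBlock's API.
struct BlockInfo {
    start: usize,
    end: usize,
    cursor: usize,
}

fn sanity_check_finished_block(b: &BlockInfo) {
    debug_assert!(
        b.start <= b.cursor && b.cursor <= b.end,
        "cursor 0x{:x} outside finished block [0x{:x}, 0x{:x})",
        b.cursor, b.start, b.end
    );
}
```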
......
@@ -150,6 +150,8 @@ pub extern fn remove_root(obj: ObjectReference) {
    gc.as_mut().unwrap().roots.remove(&obj);
}

+// yieldpoint
#[no_mangle]
#[inline(always)]
pub extern fn yieldpoint(mutator: *mut ImmixMutatorLocal) {
@@ -162,6 +164,8 @@ pub extern fn yieldpoint_slow(mutator: *mut ImmixMutatorLocal) {
    unsafe {mutator.as_mut().unwrap()}.yieldpoint_slow()
}

+// allocation
#[no_mangle]
#[inline(always)]
pub extern fn alloc(mutator: *mut ImmixMutatorLocal, size: usize, align: usize) -> ObjectReference {
@@ -196,4 +200,10 @@ pub extern fn muentry_alloc_large(mutator: *mut ImmixMutatorLocal, size: usize,
#[allow(unused_variables)]
pub extern fn muentry_init_large_object(mutator: *mut ImmixMutatorLocal, obj: ObjectReference, encode: u64) {
    MY_GC.read().unwrap().as_ref().unwrap().lo_space.init_object(obj.to_address(), encode);
}

+// force gc
+#[no_mangle]
+pub extern fn force_gc() {
+   heap::gc::trigger_gc();
+}
\ No newline at end of file
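These `#[no_mangle] pub extern` functions form the C ABI a VM client links against. A sketch of how a separate crate might declare a few of them; the opaque `Mutator` type and the plain `usize` return for `alloc` are simplifications of `ImmixMutatorLocal` and `ObjectReference`, not their real layouts:

```rust
/// Opaque stand-in for *mut ImmixMutatorLocal on the client side.
#[repr(C)]
pub struct Mutator {
    _private: [u8; 0],
}

extern "C" {
    fn yieldpoint(mutator: *mut Mutator);
    fn alloc(mutator: *mut Mutator, size: usize, align: usize) -> usize;
    fn force_gc();
}
```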
@@ -59,7 +59,7 @@ pub fn print_object(obj: Address) {
            trace!("- has type ID: {}", header_get_gctype_id(hdr));
        }
    } else {
-       trace!("more info about hybrid, not implemented");
+       trace!("- more info about hybrid, not implemented");
    }
    trace!("0x{:x} | val: 0x{:15x} | hdr: {:b}",
......
mod test_gc_harness;
-mod test_gcbench;
\ No newline at end of file
+mod test_gcbench;
+mod test_gc_linked_list;
\ No newline at end of file
extern crate gc;
extern crate utils;

use std::ptr;

extern crate simple_logger;
extern crate log;
use self::log::LogLevel;

use std::fmt;

pub fn start_logging() {
    match simple_logger::init_with_level(LogLevel::Trace) {
        Ok(_) => {},
        Err(_) => {}
    }
}
#[derive(Copy, Clone)]
struct Node {
    next   : *mut Node,
    payload: usize
}

struct LinkedList<'a> {
    head: *mut Node,
    tail: *mut Node,
    len : usize,
    allocator: &'a mut ImmixMutatorLocal
}
impl <'a> LinkedList<'a> {
    fn new(allocator: &mut ImmixMutatorLocal) -> LinkedList {
        LinkedList {
            head: ptr::null_mut(),
            tail: ptr::null_mut(),
            len : 0,
            allocator: allocator
        }
    }

    fn add(&mut self, val: usize) {
        if self.head.is_null() {
            let node = Node::new(self.allocator, val);
            self.head = node;
            self.tail = node;
            self.len = 1;
        } else {
            let node = Node::new(self.allocator, val);
            unsafe {
                (*self.tail).next = node;
            }
            self.tail = node;
            self.len += 1;
        }
    }

    fn verify(&self, expect: Vec<usize>) {
        if self.len != expect.len() {
            panic!("Linked List length: {}, expected length: {}", self.len, expect.len());
        }

        let mut i = 0;
        let mut cursor = self.head;
        while cursor != self.tail {
            println!("-verifying {:?}-", cursor);
            println!("{:?}", unsafe {*cursor});

            let val = unsafe {(*cursor).payload};
            let expect_val = expect[i];
            if val != expect_val {
                panic!("Linked List[{}] = {}, expect[{}] = {}", i, val, i, expect_val);
            }

            cursor = unsafe {(*cursor).next};
            i += 1;
        }

        // the loop above stops at the tail, so compare the tail node's payload as well
        if !self.tail.is_null() {
            let val = unsafe {(*self.tail).payload};
            if val != expect[i] {
                panic!("Linked List[{}] = {}, expect[{}] = {}", i, val, i, expect[i]);
            }
        }
    }
}
impl <'a> fmt::Debug for LinkedList<'a> {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        let mut cursor = self.head;

        // non-tail
        while cursor != self.tail {
            write!(f, "{:?}, ", unsafe {*cursor}).unwrap();
            cursor = unsafe{*cursor}.next;
        }
        write!(f, "{:?}", unsafe {*cursor}).unwrap();

        Ok(())
    }
}
use self::gc::heap;
use self::gc::heap::immix::ImmixMutatorLocal;
use self::gc::heap::immix::ImmixSpace;
use self::gc::heap::freelist;
use self::gc::heap::freelist::FreeListSpace;
use self::gc::objectmodel;
use self::utils::{ObjectReference, Address};
use std::mem::size_of;
#[cfg(feature = "use-sidemap")]
const NODE_ENCODE : u64 = 0b1100_0001u64;
#[cfg(not(feature = "use-sidemap"))]
const NODE_ENCODE : u64 = 0xb000000000000001u64;
impl Node {
    fn new(mutator: &mut ImmixMutatorLocal, val: usize) -> *mut Node {
        println!("-allocating Node({})-", val);
        let addr = mutator.alloc(size_of::<Node>(), 8);
        println!("returns address {}", addr);
        mutator.init_object(addr, NODE_ENCODE);

        let ptr = addr.to_ptr_mut::<Node>();
        println!("as pointer {:?}", ptr);
        unsafe {
            (*ptr).payload = val;
        }
        println!("result: {:?}", unsafe {*ptr});

        ptr
    }
}

impl fmt::Debug for Node {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "Node({})", self.payload)
    }
}
const IMMIX_SPACE_SIZE : usize = 40 << 20;
const LO_SPACE_SIZE : usize = 0 << 20;

#[test]
fn create_linked_list() {
    unsafe {heap::gc::set_low_water_mark();}

    start_logging();
    gc::gc_init(IMMIX_SPACE_SIZE, LO_SPACE_SIZE, 1);
    gc::gc_stats();

    let mut mutator = gc::new_mutator();

    {
        let mut linked_list = LinkedList::new(&mut mutator);

        const N: usize = 5;
        for i in 0..N {
            linked_list.add(i);
            println!("after add: {:?}", linked_list);
        }

        linked_list.verify((0..N).collect());
    }

    mutator.destroy();
}
#[test]
#[ignore]
// this test is disabled because it causes gcbench to fail for an unknown reason
fn linked_list_survive_gc() {
    unsafe {heap::gc::set_low_water_mark();}

    start_logging();
    gc::gc_init(IMMIX_SPACE_SIZE, LO_SPACE_SIZE, 1);
    gc::gc_stats();

    let mut mutator = gc::new_mutator();

    {
        let mut linked_list = LinkedList::new(&mut mutator);

        const N: usize = 5;
        for i in 0..N {
            linked_list.add(i);
            println!("after add: {:?}", linked_list);
        }

        // check
        linked_list.verify((0..N).collect());

        // put head as gc root
        let head_addr = Address::from_mut_ptr(linked_list.head);
        gc::add_to_root(unsafe { head_addr.to_object_reference() });

        // force gc
        gc::force_gc();

        // check
        linked_list.verify((0..N).collect());
    }

    mutator.destroy();
}
\ No newline at end of file
@@ -7,6 +7,7 @@
extern crate gc;
extern crate time;
+extern crate utils;
use self::gc::heap;
use self::gc::heap::immix::ImmixMutatorLocal;
@@ -14,6 +15,7 @@ use self::gc::heap::immix::ImmixSpace;
use self::gc::heap::freelist;
use self::gc::heap::freelist::FreeListSpace;
use self::gc::objectmodel;
+use self::utils::{ObjectReference, Address};
use std::mem::size_of;
use std::sync::atomic::Ordering;
@@ -157,6 +159,7 @@ fn start() {
    let mut mutator = gc::new_mutator();
    println!("Garbage Collector Test");
    println!(" Node size = {}", size_of::<Node>());
    println!(" Live storage will peak at {} bytes.\n",
             2 * (size_of::<Node>() as i32) * TreeSize(kLongLivedTreeDepth) +
             (size_of::<Array>() as i32));
@@ -173,6 +176,7 @@ fn start() {
    println!(" Creating a long-lived binary tree of depth {}", kLongLivedTreeDepth);
    let longLivedTree = alloc(&mut mutator);
    Populate(kLongLivedTreeDepth, longLivedTree, &mut mutator);
+   gc::add_to_root(unsafe{Address::from_mut_ptr(longLivedTree).to_object_reference()});
    println!(" Creating a long-lived array of {} doubles", kArraySize);
    // mm::alloc_large(&mut mutator, size_of::<Array>(), 8);
......
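Registering `longLivedTree` with `add_to_root` mirrors what `linked_list_survive_gc` does with the list head. When such an object later stops being a root, the natural counterpart is `remove_root`, which this diff also touches. A hedged sketch of the pairing, assuming `remove_root` is exported from the `gc` crate next to `add_to_root` and that the test file's imports are in scope:

```rust
/// Hypothetical helper: keep `obj` alive as a GC root for the duration of `f`.
unsafe fn with_pinned_root<T, R>(obj: *mut T, f: impl FnOnce() -> R) -> R {
    gc::add_to_root(Address::from_mut_ptr(obj).to_object_reference());
    let result = f(); // any collection during `f` treats `obj` as a root
    gc::remove_root(Address::from_mut_ptr(obj).to_object_reference());
    result
}
```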
@@ -71,6 +71,10 @@ impl Address {
        unsafe {mem::transmute(ptr)}
    }

+   #[inline(always)]
+   pub fn from_mut_ptr<T> (ptr: *mut T) -> Address {
+       unsafe {mem::transmute(ptr)}
+   }

    #[inline(always)]
    pub fn to_ptr<T> (&self) -> *const T {
        unsafe {mem::transmute(self.0)}
    }
......
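`from_mut_ptr` is the `*mut T` counterpart of `from_ptr`: a transmute of the pointer bits into an `Address`. A tiny illustrative round trip through the two calls used in this commit (`from_mut_ptr` here, `to_ptr_mut` in `Node::new`), assuming `utils::Address` is in scope:

```rust
/// Returns true if converting a raw pointer to an Address and back is lossless.
fn address_roundtrip<T>(p: *mut T) -> bool {
    Address::from_mut_ptr(p).to_ptr_mut::<T>() == p
}
```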