
Commit 996e71a2 authored by qinsoon

[wip] keep working on treadmill. Going to change my implementation.

Instead of using a cyclic doubly linked list, I am going to use two
doubly linked lists as fromspace and tospace. This will greatly
simplify the implementation.
parent aaa02167
Pipeline #230 failed in 18 minutes and 36 seconds
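A minimal sketch of the two-list design described in the commit message (illustrative only, not code from this commit: the name TreadmillSketch and the use of std::collections::LinkedList are assumptions; the diff below still uses a cyclic list of raw node pointers):

use std::collections::LinkedList;

struct TreadmillSketch {
    fromspace: LinkedList<usize>, // free blocks, handed out by alloc
    tospace: LinkedList<usize>,   // blocks currently holding objects
}

impl TreadmillSketch {
    // Allocation takes a free block from fromspace and moves it to tospace.
    fn alloc_block(&mut self) -> Option<usize> {
        let addr = self.fromspace.pop_front()?;
        self.tospace.push_back(addr);
        Some(addr)
    }

    // After tracing, dead blocks simply move back to fromspace; live blocks
    // stay in tospace. No per-node resnapping inside one cyclic list.
    fn sweep(&mut self, is_live: impl Fn(usize) -> bool) {
        let old = std::mem::take(&mut self.tospace);
        for addr in old {
            if is_live(addr) {
                self.tospace.push_back(addr);
            } else {
                self.fromspace.push_back(addr);
            }
        }
    }
}

The simplification the message promises is visible in sweep: with two lists there is no need for the scan/free/bottom pointer bookkeeping that the diff below still performs.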
@@ -15,6 +15,8 @@ use std::sync::Mutex;
 const SPACE_ALIGN : usize = 1 << 19;
 const BLOCK_SIZE : usize = 1 << 12; // 4kb
+const TRACE_TREADMILL : bool = false;
+
 #[repr(C)]
 pub struct FreeListSpace {
     start : Address,
@@ -67,7 +69,9 @@ impl FreeListSpace {
             size / BLOCK_SIZE + 1
         };
 
+        if TRACE_TREADMILL {
         trace!("before allocation, space: {}", self);
+        }
 
         trace!("requiring {} bytes ({} blocks)", size, blocks_needed);
         let res = {
@@ -75,7 +79,9 @@ impl FreeListSpace {
             treadmill.alloc_blocks(blocks_needed)
         };
 
+        if TRACE_TREADMILL {
         trace!("after allocation, space: {}", self);
+        }
 
         res
     }
@@ -89,7 +95,13 @@ impl FreeListSpace {
     pub fn sweep(&self) {
         trace!("going to sweep treadmill space");
+        if TRACE_TREADMILL {
         trace!("{}", self);
+        }
+
+        let mut nodes_scanned = 0;
+        let mut free_nodes_scanned = 0;
+        let mut alive_nodes_scanned = 0;
 
         let mut treadmill = self.treadmill.lock().unwrap();
         let trace_map = self.trace_map();
@@ -98,11 +110,14 @@
         let mut resnapped_any = false;
 
         loop {
-            trace!("scanning {}", unsafe{&*treadmill.scan});
+            trace!("scanning {}", unsafe { &*treadmill.scan });
             let addr = unsafe{&*treadmill.scan}.payload;
 
-            if objectmodel::is_traced(trace_map, self.start, unsafe { addr.to_object_reference() }, mark_state) {
+            nodes_scanned += 1;
+
+            if objectmodel::is_traced(trace_map, self.start, unsafe { addr.to_object_reference() }, mark_state) && unsafe{&*treadmill.scan}.color == objectmodel::flip(mark_state) {
                 // the object is alive, do not need to 'move' its node
+                alive_nodes_scanned += 1;
+
                 // but they will be alive, we will set them to opposite mark color
                 // (meaning they are not available after flip)
@@ -116,21 +131,28 @@
             } else {
                 // this object is dead
                 // we do not need to set their color
+                free_nodes_scanned += 1;
 
                 // we resnap it after current 'free' pointer
                 if treadmill.scan != treadmill.free {
                     // since we are going to move current node (scan), we get its prev first
                     let prev = unsafe{&*treadmill.scan}.prev();
-                    trace!("get scan's prev before resnapping it: {}", unsafe{&*prev});
+                    trace!("get scan's prev before resnapping it: {}", unsafe { &*prev });
 
                     let alive_node = unsafe { &mut *treadmill.scan }.remove();
                     trace!("is dead, take it out of treadmill");
+                    if TRACE_TREADMILL {
+                        trace!("treadmill: {}", &treadmill as &Treadmill);
+                    }
 
                     // insert alive node after free
                     unsafe{&mut *treadmill.free}.insert_after(alive_node);
                     trace!("insert after free");
+                    if TRACE_TREADMILL {
+                        trace!("treadmill: {}", &treadmill as &Treadmill);
+                    }
 
                     // if this is the first object inserted, it is the 'bottom'
                     // then 1) all resnapped objects will be between 'free' and 'bottom'
@@ -149,7 +171,7 @@
             // check if we can stop
             if resnapped_any && treadmill.scan == treadmill.b {
-                return;
+                break;
             }
 
             if !resnapped_any && treadmill.scan == treadmill.free {
                 // we never set bottom (meaning everything is alive)
@@ -158,6 +180,13 @@
                 panic!("we ran out of memory in large object space")
             }
         }
+
+        if cfg!(debug_assertions) {
+            debug!("---tread mill space---");
+            debug!("total nodes scanned: {}", nodes_scanned);
+            debug!("alive nodes scanned: {}", alive_nodes_scanned);
+            debug!("free nodes scanned: {}", free_nodes_scanned);
+        }
     }
 }
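For readers following the resnapping in the hunks above: when the scanned node is dead, it is unlinked from where 'scan' points and re-inserted immediately after the 'free' pointer. A self-contained model of that list surgery (hypothetical helpers, not the commit's code; Vec indices stand in for the diff's raw node pointers):

struct Node { prev: usize, next: usize }

// Unlink node `i` from the doubly linked list
// (mirrors TreadmillNode::remove in the diff).
fn unlink(nodes: &mut [Node], i: usize) {
    let (p, n) = (nodes[i].prev, nodes[i].next);
    nodes[p].next = n;
    nodes[n].prev = p;
}

// Re-insert node `i` directly after node `at`
// (mirrors insert_after on the 'free' node).
fn insert_after(nodes: &mut [Node], at: usize, i: usize) {
    let n = nodes[at].next;
    nodes[i].prev = at;
    nodes[i].next = n;
    nodes[at].next = i;
    nodes[n].prev = i;
}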
@@ -287,6 +316,14 @@ impl fmt::Display for Treadmill {
     }
 }
 
+#[derive(Clone, Copy, PartialEq, Eq, Debug)]
+enum TreadmillNodeColor {
+    Offwhite,
+    White,
+    Grey,
+    Black
+}
+
 struct TreadmillNode {
     payload: Address,
     color: u8,
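(The four variants presumably follow Baker's treadmill colour convention: off-white for free blocks, white for condemned/unscanned objects, grey for reached but not yet scanned, and black for scanned live objects. The enum is not wired up in this commit; TreadmillNode still stores color as a raw u8.)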
@@ -271,6 +271,7 @@ impl ImmixSpace {
         used_blocks_lock.append(&mut live_blocks);
 
         if cfg!(debug_assertions) {
             debug!("---immix space---");
+            debug!("free lines = {} of {} total ({} blocks)", free_lines, self.total_blocks * immix::LINES_IN_BLOCK, self.total_blocks);
             debug!("usable blocks = {}", usable_blocks);
             debug!("full blocks = {}", full_blocks);
@@ -87,6 +87,37 @@ fn test_alloc_large_trigger_gc() {
     mutator.destroy();
 }
 
+#[test]
+#[allow(unused_variables)]
+fn test_alloc_large_trigger_gc2() {
+    gc::gc_init(SMALL_SPACE_SIZE, 4096 * 10, 8);
+    let mut mutator = gc::new_mutator();
+
+    start_logging();
+
+    // this will exhaust the lo space
+    for _ in 0..10 {
+        mutator.yieldpoint();
+
+        let res = gc::muentry_alloc_large(&mut mutator, LARGE_OBJECT_SIZE, OBJECT_ALIGN);
+        gc::muentry_init_large_object(&mut mutator, res, 0b1100_0000);
+    }
+
+    // this will trigger a gc, and allocate it in the collected space
+    let res = gc::muentry_alloc_large(&mut mutator, LARGE_OBJECT_SIZE, OBJECT_ALIGN);
+    gc::muentry_init_large_object(&mut mutator, res, 0b1100_0000);
+
+    // this will trigger gcs for immix space
+    for _ in 0..100000 {
+        mutator.yieldpoint();
+
+        let res = mutator.alloc(OBJECT_SIZE, OBJECT_ALIGN);
+        mutator.init_object(res, 0b1100_0011);
+    }
+
+    mutator.destroy();
+}
+
 #[test]
 fn test_alloc_mark() {
     gc::gc_init(IMMIX_SPACE_SIZE, LO_SPACE_SIZE, 8);