GitLab will be upgraded to version 12.10.14-ce.0 on 28 Sept 2020, from 2.00pm to 2.30pm (AEDT). During the update, GitLab and Mattermost services will not be available. If you have any concerns, please talk to us at N110 (b), CSIT building.

test_gc_harness.rs 8.23 KB
Newer Older
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24
extern crate gc;
extern crate utils;
extern crate simple_logger;
extern crate log;

use self::log::LogLevel;
use self::gc::heap;
use self::gc::objectmodel;
use self::utils::Address;
use std::sync::atomic::Ordering;

/// Initialise trace-level logging for the test harness.
///
/// `simple_logger::init_with_level` returns `Err` if a global logger has
/// already been installed. Several tests call this unconditionally, so the
/// result is deliberately discarded: the first initialisation wins and
/// later attempts are harmless no-ops.
pub fn start_logging() {
    // `.ok()` discards the Result on purpose (see above).
    simple_logger::init_with_level(LogLevel::Trace).ok();
}

// Size and alignment of the small fixed-size objects allocated by the tests.
const OBJECT_SIZE : usize = 24;
const OBJECT_ALIGN: usize = 8;

// Number of objects each allocation loop requests.
const WORK_LOAD : usize = 10000;

// Candidate heap sizes; each test picks the size that suits it, so any of
// these may be unused depending on which tests are compiled in.
#[allow(dead_code)]
const SPACIOUS_SPACE_SIZE : usize = 500 << 20;  // 500mb
#[allow(dead_code)]
const LIMITED_SPACE_SIZE  : usize = 20  << 20;  // 20mb
#[allow(dead_code)]
const SMALL_SPACE_SIZE    : usize = 1   << 19;  // 512kb

// Default sizes for the immix (small-object) and large-object spaces.
#[allow(dead_code)]
const IMMIX_SPACE_SIZE : usize = SPACIOUS_SPACE_SIZE;
#[allow(dead_code)]
const LO_SPACE_SIZE    : usize = SPACIOUS_SPACE_SIZE;

// Object-header encodings passed to `init_object`/`muentry_init_object`.
// The bit patterns differ between the side-map object model and the
// in-header object model, hence the cfg pairs below.
// NOTE(review): the exact meaning of each bit is defined by the gc crate's
// object model — confirm against `gc::objectmodel` before changing.

// Fixed-size object with no reference fields.
#[cfg(feature = "use-sidemap")]
const FIXSIZE_NOREF_ENCODE : u64 = 0b1100_0000u64;
#[cfg(not(feature = "use-sidemap"))]
const FIXSIZE_NOREF_ENCODE : u64 = 0xb000000000000000u64;

// Fixed-size object whose first two words are references.
#[cfg(feature = "use-sidemap")]
const FIXSIZE_REFx2_ENCODE : u64 = 0b1100_0011u64;
#[cfg(not(feature = "use-sidemap"))]
const FIXSIZE_REFx2_ENCODE : u64 = 0xb000000000000003u64;

// Fixed-size object whose first word is a reference.
#[cfg(feature = "use-sidemap")]
const FIXSIZE_REFx1_ENCODE : u64 = 0b1100_0001u64;
#[cfg(not(feature = "use-sidemap"))]
const FIXSIZE_REFx1_ENCODE : u64 = 0xb000000000000001u64;

/// Allocate `WORK_LOAD` small no-ref objects in a spacious heap.
/// Passes if the allocation loop completes without wedging; nothing is
/// rooted, so everything is garbage from the collector's point of view.
#[test]
fn test_exhaust_alloc() {
    gc::gc_init(IMMIX_SPACE_SIZE, LO_SPACE_SIZE, 8);
    let mut mutator = gc::new_mutator();

    println!("Trying to allocate {} objects of (size {}, align {}). ", WORK_LOAD, OBJECT_SIZE, OBJECT_ALIGN);
    const ACTUAL_OBJECT_SIZE : usize = OBJECT_SIZE;
    println!("Considering header size of {}, an object should be {}. ", 0, ACTUAL_OBJECT_SIZE);
    println!("This would take {} bytes of {} bytes heap", WORK_LOAD * ACTUAL_OBJECT_SIZE, heap::IMMIX_SPACE_SIZE.load(Ordering::SeqCst));

    for _ in 0..WORK_LOAD {
        // Give the collector a chance to stop this mutator at a safepoint.
        mutator.yieldpoint();

        let res = mutator.alloc(OBJECT_SIZE, OBJECT_ALIGN);
        mutator.init_object(res, FIXSIZE_NOREF_ENCODE);
    }

    mutator.destroy();
}

// Size used for large-object-space allocations (`muentry_alloc_large`).
const LARGE_OBJECT_SIZE : usize = 256;

/// Allocate `WORK_LOAD` objects through the large-object path in a
/// spacious heap. Passes if the loop completes; nothing is rooted.
#[test]
#[allow(unused_variables)]
fn test_exhaust_alloc_large() {
    gc::gc_init(IMMIX_SPACE_SIZE, LO_SPACE_SIZE, 8);
    let mut mutator = gc::new_mutator();

    start_logging();

    for _ in 0..WORK_LOAD {
        // Give the collector a chance to stop this mutator at a safepoint.
        mutator.yieldpoint();

        let res = gc::muentry_alloc_large(&mut mutator, LARGE_OBJECT_SIZE, OBJECT_ALIGN);
        gc::muentry_init_object(&mut mutator, res, FIXSIZE_NOREF_ENCODE);
    }

    mutator.destroy();
}

/// Allocate large objects into a tiny large-object space (4096 * 10 bytes)
/// so the loop repeatedly triggers LO-space collections. The first
/// `KEEP_N_ROOTS` objects are registered as roots and so must survive
/// every collection.
#[test]
#[allow(unused_variables)]
fn test_alloc_large_lo_trigger_gc() {
    const KEEP_N_ROOTS : usize = 1;
    let mut roots : usize = 0;

    gc::gc_init(SMALL_SPACE_SIZE, 4096 * 10, 8);
    let mut mutator = gc::new_mutator();

    start_logging();

    for _ in 0..WORK_LOAD {
        // Give the collector a chance to stop this mutator at a safepoint.
        mutator.yieldpoint();

        let res = gc::muentry_alloc_large(&mut mutator, LARGE_OBJECT_SIZE, OBJECT_ALIGN);
        gc::muentry_init_object(&mut mutator, res, FIXSIZE_NOREF_ENCODE);

        // Root the first few objects so they stay alive across collections.
        if roots < KEEP_N_ROOTS {
            gc::add_to_root(res);
            roots += 1;
        }
    }

    mutator.destroy();
}

/// Exhaust the tiny large-object space, force an LO-space collection with
/// one more large allocation, then hammer the small immix space to force
/// immix collections as well — GC triggering in both spaces is exercised.
#[test]
#[allow(unused_variables)]
fn test_alloc_large_both_trigger_gc() {
    gc::gc_init(SMALL_SPACE_SIZE, 4096 * 10, 8);
    let mut mutator = gc::new_mutator();

    start_logging();

    // this will exhaust the lo space
    for _ in 0..10 {
        mutator.yieldpoint();

        let res = gc::muentry_alloc_large(&mut mutator, LARGE_OBJECT_SIZE, OBJECT_ALIGN);
        gc::muentry_init_object(&mut mutator, res, FIXSIZE_NOREF_ENCODE);
    }

    // this will trigger a gc, and allocate it in the collected space
    let res = gc::muentry_alloc_large(&mut mutator, LARGE_OBJECT_SIZE, OBJECT_ALIGN);
    gc::muentry_init_object(&mut mutator, res, FIXSIZE_NOREF_ENCODE);

    // this will trigger gcs for immix space
    for _ in 0..100000 {
        mutator.yieldpoint();

        let res = mutator.alloc(OBJECT_SIZE, OBJECT_ALIGN);
        mutator.init_object(res, FIXSIZE_REFx2_ENCODE);
    }

    mutator.destroy();
}

/// Allocate `WORK_LOAD` objects, then mark each one by hand — setting its
/// trace-map bit and its line-mark-table entry — exercising the side-map
/// object model's marking paths without running a full collection.
#[test]
#[cfg(feature = "use-sidemap")]
fn test_alloc_mark() {
    gc::gc_init(IMMIX_SPACE_SIZE, LO_SPACE_SIZE, 8);
    let mut mutator = gc::new_mutator();

    println!("Trying to allocate 1 object of (size {}, align {}). ", OBJECT_SIZE, OBJECT_ALIGN);
    const ACTUAL_OBJECT_SIZE : usize = OBJECT_SIZE;
    println!("Considering header size of {}, an object should be {}. ", 0, ACTUAL_OBJECT_SIZE);

    println!("Trying to allocate {} objects, which will take roughly {} bytes", WORK_LOAD, WORK_LOAD * ACTUAL_OBJECT_SIZE);
    let mut objs = vec![];
    for _ in 0..WORK_LOAD {
        let res = mutator.alloc(ACTUAL_OBJECT_SIZE, OBJECT_ALIGN);
        mutator.init_object(res, FIXSIZE_REFx2_ENCODE);

        objs.push(unsafe {res.to_object_reference()});
    }

    let (shared_space, _) = gc::get_spaces();

    println!("Start marking");
    let mark_state = objectmodel::load_mark_state();

    let line_mark_table = shared_space.line_mark_table();
    let (space_start, space_end) = (shared_space.start(), shared_space.end());

    let trace_map = shared_space.trace_map.ptr;

    for &obj in objs.iter() {
        // mark the object as traced
        objectmodel::mark_as_traced(trace_map, space_start, obj, mark_state);

        // mark meta-data, but only for objects inside the immix space
        if obj.to_address() >= space_start && obj.to_address() < space_end {
            line_mark_table.mark_line_live2(space_start, obj.to_address());
        }
    }

    mutator.destroy();
}

/// In-header object-model variant of `test_alloc_mark`: allocate
/// `WORK_LOAD` objects, then mark each one by hand (header mark bit plus
/// line-mark-table entry) without running a full collection.
#[test]
#[cfg(not(feature = "use-sidemap"))]
fn test_alloc_mark() {
    gc::gc_init(IMMIX_SPACE_SIZE, LO_SPACE_SIZE, 8);
    let mut mutator = gc::new_mutator();

    println!("Trying to allocate 1 object of (size {}, align {}). ", OBJECT_SIZE, OBJECT_ALIGN);
    const ACTUAL_OBJECT_SIZE : usize = OBJECT_SIZE;
    println!("Considering header size of {}, an object should be {}. ", 0, ACTUAL_OBJECT_SIZE);

    println!("Trying to allocate {} objects, which will take roughly {} bytes", WORK_LOAD, WORK_LOAD * ACTUAL_OBJECT_SIZE);
    let mut objs = vec![];
    for _ in 0..WORK_LOAD {
        let res = mutator.alloc(ACTUAL_OBJECT_SIZE, OBJECT_ALIGN);
        mutator.init_object(res, FIXSIZE_REFx2_ENCODE);

        objs.push(unsafe {res.to_object_reference()});
    }

    let (shared_space, _) = gc::get_spaces();

    println!("Start marking");
    let mark_state = objectmodel::load_mark_state();

    let line_mark_table = shared_space.line_mark_table();
    let (space_start, space_end) = (shared_space.start(), shared_space.end());

    let trace_map = shared_space.trace_map.ptr;

    for &obj in objs.iter() {
        // mark the object as traced
        objectmodel::mark_as_traced(obj, mark_state);

        // mark meta-data, but only for objects inside the immix space
        if obj.to_address() >= space_start && obj.to_address() < space_end {
            line_mark_table.mark_line_live2(space_start, obj.to_address());
        }
    }

    mutator.destroy();
}

// Node shape for the linked list traced in `test_alloc_trace`: a header
// word followed by one reference, matching the REFx1 encoding where the
// first payload word (offset 0) is a reference. No `Node` value is ever
// constructed directly — objects are allocated via the mutator and written
// through raw addresses, so the type exists only to document the layout.
// NOTE(review): trailing fields are padding; total size is presumably meant
// to cover ACTUAL_OBJECT_SIZE — confirm against the object model.
#[allow(dead_code)]
struct Node<'a> {
    hdr  : u64,
    next : &'a Node<'a>,
    unused_ptr : usize,
    unused_int : i32,
    unused_int2: i32
}

/// Build a `WORK_LOAD`-long singly linked list of REFx1 objects (each
/// object's first word points to the next) and run the collector's trace
/// from the list head, checking that tracing a long reference chain
/// completes.
#[test]
fn test_alloc_trace() {
    gc::gc_init(IMMIX_SPACE_SIZE, LO_SPACE_SIZE, 8);
    let mut mutator = gc::new_mutator();
    let (shared_space, lo_space) = gc::get_spaces();

    println!("Trying to allocate 1 object of (size {}, align {}). ", OBJECT_SIZE, OBJECT_ALIGN);
    const ACTUAL_OBJECT_SIZE : usize = OBJECT_SIZE;
    println!("Considering header size of {}, an object should be {}. ", 0, ACTUAL_OBJECT_SIZE);

    println!("Trying to allocate {} objects, which will take roughly {} bytes", WORK_LOAD, WORK_LOAD * ACTUAL_OBJECT_SIZE);
    let root = mutator.alloc(ACTUAL_OBJECT_SIZE, OBJECT_ALIGN);
    mutator.init_object(root, FIXSIZE_REFx1_ENCODE);

    let mut prev = root;
    for _ in 0..WORK_LOAD - 1 {
        let res = mutator.alloc(ACTUAL_OBJECT_SIZE, OBJECT_ALIGN);
        mutator.init_object(res, FIXSIZE_REFx1_ENCODE);

        // set prev's 1st field (offset 0) to this object
        unsafe {prev.store::<Address>(res)};

        prev = res;
    }

    println!("Start tracing");
    // Only the list head is rooted; everything else is reachable through it.
    let mut roots = vec![unsafe {root.to_object_reference()}];

    heap::gc::start_trace(&mut roots, shared_space, lo_space);

    mutator.destroy();
}