lib.rs 6.09 KB
Newer Older
qinsoon's avatar
qinsoon committed
1 2 3 4 5 6 7 8
extern crate utils;
#[macro_use]
extern crate lazy_static;
#[macro_use]
extern crate log;
extern crate simple_logger;
extern crate aligned_alloc;
extern crate crossbeam;
9
extern crate rustc_serialize;
qinsoon's avatar
qinsoon committed
10

11 12 13 14 15 16
use std::sync::atomic::Ordering;

pub mod common;
pub mod objectmodel;
pub mod heap;

qinsoon's avatar
qinsoon committed
17
use common::gctype::GCType;
qinsoon's avatar
qinsoon committed
18
use utils::ObjectReference;
qinsoon's avatar
qinsoon committed
19
use heap::immix::BYTES_IN_LINE;
qinsoon's avatar
qinsoon committed
20 21 22 23
use heap::immix::ImmixSpace;
use heap::immix::ImmixMutatorLocal;
use heap::freelist;
use heap::freelist::FreeListSpace;
24
use common::objectdump;
25

26
use utils::LinkedHashSet;
27
use utils::Address;
28

qinsoon's avatar
qinsoon committed
29
use std::fmt;
30 31 32
use std::sync::Arc;
use std::sync::RwLock;

qinsoon's avatar
qinsoon committed
33 34
/// Whether this GC may move objects; the current immix/free-list
/// configuration is non-moving.
pub const GC_MOVES_OBJECT : bool = false;

qinsoon's avatar
qinsoon committed
35 36
/// Size threshold separating immix allocation from the large object space
/// (one immix line).
// NOTE(review): whether the boundary test is `>` or `>=` is decided at the
// call sites — confirm there.
pub const LARGE_OBJECT_THRESHOLD : usize = BYTES_IN_LINE;

37 38 39 40
// Re-exports for clients of this crate.
// NOTE(review): the *_OFFSET values presumably let a compiler emit inlined
// fast-path allocation against the mutator's cursor/limit fields — confirm
// against the callers.
pub use heap::immix::ImmixMutatorLocal as Mutator;
pub use heap::immix::CURSOR_OFFSET as ALLOCATOR_CURSOR_OFFSET;
pub use heap::immix::LIMIT_OFFSET as ALLOCATOR_LIMIT_OFFSET;

41 42 43
/// Process-wide GC state: the two heap spaces, the registered GC types,
/// and the explicitly managed root set.
// NOTE(review): #[repr(C)] fixes field order/layout — do not reorder fields.
#[repr(C)]
pub struct GC {
    // small-object space (immix)
    immix_space: Arc<ImmixSpace>,
    // large-object space (free list)
    lo_space   : Arc<FreeListSpace>,

    // registered types; a type's `id` is its index in this vector
    gc_types   : Vec<Arc<GCType>>,
    // explicitly added roots (see add_to_root/remove_root)
    roots      : LinkedHashSet<ObjectReference>
}

qinsoon's avatar
qinsoon committed
50 51 52 53
impl fmt::Debug for GC {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "GC\n").unwrap();
        write!(f, "{}", self.immix_space).unwrap();
54 55

        write!(f, "{}", self.lo_space)
qinsoon's avatar
qinsoon committed
56 57 58
    }
}

59 60 61 62
lazy_static! {
    /// The process-wide GC singleton; `None` until `gc_init` is called.
    pub static ref MY_GC : RwLock<Option<GC>> = RwLock::new(None);
}

qinsoon's avatar
qinsoon committed
63 64 65 66 67
/// Prints the current GC state (both spaces) to stdout.
/// Panics if called before `gc_init`.
#[no_mangle]
pub extern fn gc_stats() {
    let gc_lock = MY_GC.read().unwrap();
    println!("{:?}", gc_lock.as_ref().unwrap());
}

qinsoon's avatar
qinsoon committed
68
#[no_mangle]
69
pub extern fn get_spaces() -> (Arc<ImmixSpace>, Arc<FreeListSpace>) {
qinsoon's avatar
qinsoon committed
70 71 72 73 74 75
    let space_lock = MY_GC.read().unwrap();
    let space = space_lock.as_ref().unwrap();
    
    (space.immix_space.clone(), space.lo_space.clone())
}

qinsoon's avatar
qinsoon committed
76
#[no_mangle]
77
pub extern fn add_gc_type(mut ty: GCType) -> Arc<GCType> {
qinsoon's avatar
qinsoon committed
78 79 80
    let mut gc_guard = MY_GC.write().unwrap();
    let mut gc = gc_guard.as_mut().unwrap();

81
    let index = gc.gc_types.len() as u32;
qinsoon's avatar
qinsoon committed
82 83
    ty.id = index;

84
    let ty = Arc::new(ty);
qinsoon's avatar
qinsoon committed
85

86 87 88
    gc.gc_types.push(ty.clone());

    ty
qinsoon's avatar
qinsoon committed
89 90
}

91 92 93 94 95
#[no_mangle]
pub extern fn get_gc_type_encode(id: u32) -> u64 {
    let gc_lock = MY_GC.read().unwrap();
    let ref gctype  = gc_lock.as_ref().unwrap().gc_types[id as usize];

qinsoon's avatar
qinsoon committed
96 97 98 99 100
    if gctype.is_hybrid() {
        objectmodel::gen_hybrid_gctype_encode(gctype, 0) // fake length
    } else {
        objectmodel::gen_gctype_encode(gctype)
    }
101 102
}

103 104 105 106
#[no_mangle]
pub extern fn gc_init(immix_size: usize, lo_size: usize, n_gcthreads: usize) {
    // set this line to turn on certain level of debugging info
//    simple_logger::init_with_level(log::LogLevel::Trace).ok();
107 108 109

    // init object model - init this first, since spaces may use it
    objectmodel::init();
110 111 112 113 114 115 116
    
    // init space size
    heap::IMMIX_SPACE_SIZE.store(immix_size, Ordering::SeqCst);
    heap::LO_SPACE_SIZE.store(lo_size, Ordering::SeqCst);
    
    let (immix_space, lo_space) = {
        let immix_space = Arc::new(ImmixSpace::new(immix_size));
117
        let lo_space    = Arc::new(FreeListSpace::new(lo_size));
118

qinsoon's avatar
qinsoon committed
119
        heap::gc::init(n_gcthreads);
120 121 122 123
        
        (immix_space, lo_space)
    };
    
qinsoon's avatar
qinsoon committed
124 125 126 127
    *MY_GC.write().unwrap() = Some(GC {
        immix_space: immix_space,
        lo_space: lo_space,

128 129
        gc_types: vec![],
        roots   : LinkedHashSet::new()
qinsoon's avatar
qinsoon committed
130 131
    });

qinsoon's avatar
qinsoon committed
132 133
    info!("heap is {} bytes (immix: {} bytes, lo: {} bytes) . ", immix_size + lo_size, immix_size, lo_size);
    info!("{} gc threads", n_gcthreads);
134 135 136
}

#[no_mangle]
137 138
pub extern fn new_mutator() -> ImmixMutatorLocal {
    ImmixMutatorLocal::new(MY_GC.read().unwrap().as_ref().unwrap().immix_space.clone())
139 140 141 142
}

/// Tears down a mutator-local allocator. Panics if `mutator` is null.
#[no_mangle]
#[allow(unused_variables)]
pub extern fn drop_mutator(mutator: *mut ImmixMutatorLocal) {
    unsafe {mutator.as_mut().unwrap()}.destroy();

    // rust will reclaim the boxed mutator
    // NOTE(review): nothing here takes ownership of the allocation (no
    // Box::from_raw) — presumably the caller frees the box; confirm,
    // otherwise this leaks.
}

// C helpers from gc_clib_x64, linked on x86-64 only.
#[cfg(target_arch = "x86_64")]
#[link(name = "gc_clib_x64")]
extern "C" {
    // NOTE(review): semantics inferred from the name (records the stack's
    // low water mark, presumably for stack scanning) — defined in the C
    // library; confirm there.
    pub fn set_low_water_mark();
}

155 156 157 158 159 160 161 162 163 164 165 166 167 168
// explicitly control roots

/// Registers `obj` as an explicit GC root.
/// Panics if called before `gc_init`.
#[no_mangle]
pub extern fn add_to_root(obj: ObjectReference) {
    let mut gc_lock = MY_GC.write().unwrap();
    let gc = gc_lock.as_mut().unwrap();
    gc.roots.insert(obj);
}

/// Unregisters `obj` from the explicit GC root set.
/// Panics if called before `gc_init`.
#[no_mangle]
pub extern fn remove_root(obj: ObjectReference) {
    let mut gc_lock = MY_GC.write().unwrap();
    let gc = gc_lock.as_mut().unwrap();
    gc.roots.remove(&obj);
}

169 170
// yieldpoint

171 172
#[no_mangle]
#[inline(always)]
173 174
pub extern fn yieldpoint(mutator: *mut ImmixMutatorLocal) {
    unsafe {mutator.as_mut().unwrap()}.yieldpoint();
175 176 177 178
}

#[no_mangle]
#[inline(never)]
179 180
pub extern fn yieldpoint_slow(mutator: *mut ImmixMutatorLocal) {
    unsafe {mutator.as_mut().unwrap()}.yieldpoint_slow()
181 182
}

183 184
// allocation

185 186
#[no_mangle]
#[inline(always)]
187
pub extern fn alloc(mutator: *mut ImmixMutatorLocal, size: usize, align: usize) -> ObjectReference {
188
    let addr = unsafe {&mut *mutator}.alloc(size, align);
189 190 191
    unsafe {addr.to_object_reference()}
}

192
#[no_mangle]
193 194
#[inline(never)]
pub extern fn muentry_init_object(mutator: *mut ImmixMutatorLocal, obj: ObjectReference, encode: u64) {
195 196 197
    unsafe {&mut *mutator}.init_object(obj.to_address(), encode);
}

qinsoon's avatar
qinsoon committed
198 199 200 201 202 203
/// Writes the type-encoding header for a freshly allocated hybrid
/// (variable-length) object of the given `length`.
#[no_mangle]
#[inline(never)]
pub extern fn muentry_init_hybrid(mutator: *mut ImmixMutatorLocal, obj: ObjectReference, encode: u64, length: u64) {
    let local = unsafe {&mut *mutator};
    local.init_hybrid(obj.to_address(), encode, length);
}

204
#[no_mangle]
qinsoon's avatar
qinsoon committed
205
#[inline(never)]
qinsoon's avatar
qinsoon committed
206
pub extern fn muentry_alloc_slow(mutator: *mut ImmixMutatorLocal, size: usize, align: usize) -> ObjectReference {
207
    let ret = unsafe {&mut *mutator}.try_alloc_from_local(size, align);
208 209
    trace!("muentry_alloc_slow(mutator: {:?}, size: {}, align: {}) = {}", mutator, size, align, ret);

210 211 212 213
    unsafe {ret.to_object_reference()}
}

#[no_mangle]
qinsoon's avatar
qinsoon committed
214
pub extern fn muentry_alloc_large(mutator: *mut ImmixMutatorLocal, size: usize, align: usize) -> ObjectReference {
215
    let ret = freelist::alloc_large(size, align, unsafe {mutator.as_mut().unwrap()}, MY_GC.read().unwrap().as_ref().unwrap().lo_space.clone());
216 217
    trace!("muentry_alloc_large(mutator: {:?}, size: {}, align: {}) = {}", mutator, size, align, ret);

218
    unsafe {ret.to_object_reference()}
219 220
}

221 222 223 224
// force gc
/// Explicitly triggers a garbage collection.
#[no_mangle]
pub extern fn force_gc() {
    heap::gc::trigger_gc();
}
226 227 228 229 230 231

// dump heap
/// Dumps the heap reachable from `roots` into a serialisable `HeapDump`.
// NOTE(review): an `extern` fn taking `Vec<Address>` is not FFI-safe —
// presumably only called from Rust; confirm before exposing to C.
#[no_mangle]
pub extern fn persist_heap(roots: Vec<Address>) -> objectdump::HeapDump {
    objectdump::HeapDump::from_roots(roots)
}