extern crate utils;
#[macro_use]
extern crate lazy_static;
#[macro_use]
extern crate log;
extern crate simple_logger;
extern crate aligned_alloc;
extern crate crossbeam;
extern crate rustc_serialize;
#[macro_use]
extern crate field_offset;

use std::sync::atomic::Ordering;

pub mod common;
pub mod objectmodel;
pub mod heap;

use common::gctype::GCType;
use utils::ObjectReference;
use heap::immix::BYTES_IN_LINE;
use heap::immix::ImmixSpace;
use heap::immix::ImmixMutatorLocal;
use heap::freelist;
use heap::freelist::FreeListSpace;
use common::objectdump;

use utils::LinkedHashSet;
use utils::Address;

use std::fmt;
use std::sync::Arc;
use std::sync::RwLock;

pub const GC_MOVES_OBJECT : bool = false;

pub const LARGE_OBJECT_THRESHOLD : usize = BYTES_IN_LINE;

pub use heap::immix::ImmixMutatorLocal as Mutator;
pub use heap::immix::CURSOR_OFFSET as ALLOCATOR_CURSOR_OFFSET;
pub use heap::immix::LIMIT_OFFSET as ALLOCATOR_LIMIT_OFFSET;

#[repr(C)]
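/// global GC state: the immix space, the large object (freelist) space,
/// the registered GC types, and the explicitly managed root set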
pub struct GC {
    immix_space: Arc<ImmixSpace>,
    lo_space   : Arc<FreeListSpace>,

    gc_types   : Vec<Arc<GCType>>,
    roots      : LinkedHashSet<ObjectReference>
}

impl fmt::Debug for GC {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "GC\n").unwrap();
        write!(f, "{}", self.immix_space).unwrap();

        write!(f, "{}", self.lo_space)
    }
}

lazy_static! {
    pub static ref MY_GC : RwLock<Option<GC>> = RwLock::new(None);
}

#[no_mangle]
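/// prints the current GC state (the immix and large object spaces)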
pub extern fn gc_stats() {
    println!("{:?}", MY_GC.read().unwrap().as_ref().unwrap());
}

#[no_mangle]
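/// returns shared handles to the immix space and the large object space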
pub extern fn get_spaces() -> (Arc<ImmixSpace>, Arc<FreeListSpace>) {
    let space_lock = MY_GC.read().unwrap();
    let space = space_lock.as_ref().unwrap();
    
    (space.immix_space.clone(), space.lo_space.clone())
}

#[no_mangle]
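/// registers a GC type: assigns it the next available id, stores it, and returns a shared handle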
pub extern fn add_gc_type(mut ty: GCType) -> Arc<GCType> {
    let mut gc_guard = MY_GC.write().unwrap();
    let gc = gc_guard.as_mut().unwrap();

    let index = gc.gc_types.len() as u32;
    ty.id = index;

    let ty = Arc::new(ty);

    gc.gc_types.push(ty.clone());

    ty
}

#[no_mangle]
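/// returns the object model encoding for the registered GC type with the given id
/// (hybrid types are encoded with a placeholder length)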
pub extern fn get_gc_type_encode(id: u32) -> u64 {
    let gc_lock = MY_GC.read().unwrap();
    let gctype = &gc_lock.as_ref().unwrap().gc_types[id as usize];

    if gctype.is_hybrid() {
        objectmodel::gen_hybrid_gctype_encode(gctype, 0) // fake length
    } else {
        objectmodel::gen_gctype_encode(gctype)
    }
}

#[no_mangle]
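/// initialises the GC: the object model, the configured space sizes, the immix and
/// large object spaces, the GC threads, and whether collection is enabled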
pub extern fn gc_init(immix_size: usize, lo_size: usize, n_gcthreads: usize, enable_gc: bool) {
    // uncomment the following line to turn on logging at the desired level
//    simple_logger::init_with_level(log::LogLevel::Trace).ok();

    // init object model - init this first, since spaces may use it
    objectmodel::init();
    
    // init space size
    heap::IMMIX_SPACE_SIZE.store(immix_size, Ordering::SeqCst);
    heap::LO_SPACE_SIZE.store(lo_size, Ordering::SeqCst);
    
    let (immix_space, lo_space) = {
        let immix_space = Arc::new(ImmixSpace::new(immix_size));
        let lo_space    = Arc::new(FreeListSpace::new(lo_size));

        heap::gc::init(n_gcthreads);
        
        (immix_space, lo_space)
    };
    
    *MY_GC.write().unwrap() = Some(GC {
        immix_space: immix_space,
        lo_space: lo_space,

        gc_types: vec![],
        roots   : LinkedHashSet::new()
    });

    if enable_gc {
        heap::gc::ENABLE_GC.store(true, Ordering::Relaxed);
    } else {
        heap::gc::ENABLE_GC.store(false, Ordering::Relaxed);
    }

    info!("heap is {} bytes (immix: {} bytes, lo: {} bytes)", immix_size + lo_size, immix_size, lo_size);
    info!("{} gc threads", n_gcthreads);
    if !enable_gc {
        warn!("GC disabled (the program will panic if a collection is triggered)");
    }
}

#[no_mangle]
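/// creates a mutator-local allocator bound to the global immix space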
pub extern fn new_mutator() -> ImmixMutatorLocal {
    ImmixMutatorLocal::new(MY_GC.read().unwrap().as_ref().unwrap().immix_space.clone())
}

#[no_mangle]
#[allow(unused_variables)]
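/// destroys the given mutator; the boxed mutator itself is reclaimed by Rust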
pub extern fn drop_mutator(mutator: *mut ImmixMutatorLocal) {
    unsafe {mutator.as_mut().unwrap()}.destroy();
    
    // rust will reclaim the boxed mutator
}

pub use heap::gc::set_low_water_mark;

// explicitly control roots

#[no_mangle]
pub extern fn add_to_root(obj: ObjectReference) {
    let mut gc = MY_GC.write().unwrap();
    gc.as_mut().unwrap().roots.insert(obj);
}

#[no_mangle]
pub extern fn remove_root(obj: ObjectReference) {
    let mut gc = MY_GC.write().unwrap();
    gc.as_mut().unwrap().roots.remove(&obj);
}

// yieldpoint

#[no_mangle]
#[inline(always)]
pub extern fn yieldpoint(mutator: *mut ImmixMutatorLocal) {
    unsafe {mutator.as_mut().unwrap()}.yieldpoint();
}

#[no_mangle]
#[inline(never)]
pub extern fn yieldpoint_slow(mutator: *mut ImmixMutatorLocal) {
    unsafe {mutator.as_mut().unwrap()}.yieldpoint_slow()
}

// allocation

#[inline(always)]
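/// allocates `size` bytes at `align` alignment from the mutator's immix allocator
/// and returns the resulting address as an ObjectReference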
pub fn alloc(mutator: *mut ImmixMutatorLocal, size: usize, align: usize) -> ObjectReference {
    let addr = unsafe {&mut *mutator}.alloc(size, align);
    unsafe {addr.to_object_reference()}
}

#[no_mangle]
#[inline(never)]
/// size doesn't include HEADER_SIZE, return value is offset by HEADER_OFFSET
pub extern fn muentry_alloc_fast(mutator: *mut ImmixMutatorLocal, size: usize, align: usize) -> ObjectReference {
    let ret = alloc(mutator, size, align);
    trace!("muentry_alloc_fast(mutator: {:?}, size: {}, align: {}) = {}", mutator, size, align, ret);

    ret
}

#[no_mangle]
#[inline(never)]
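/// initialises the header of `obj` with the given type encoding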
pub extern fn muentry_init_object(mutator: *mut ImmixMutatorLocal, obj: ObjectReference, encode: u64) {
    unsafe {&mut *mutator}.init_object(obj.to_address(), encode);
}

#[no_mangle]
#[inline(never)]
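/// initialises the header of a hybrid object with the given type encoding and length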
pub extern fn muentry_init_hybrid(mutator: *mut ImmixMutatorLocal, obj: ObjectReference, encode: u64, length: u64) {
    unsafe {&mut *mutator}.init_hybrid(obj.to_address(), encode, length);
}

#[no_mangle]
#[inline(never)]
/// this function is supposed to be called by an inlined fastpath
/// size _includes_ HEADER_SIZE, return value is _NOT_ offset by HEADER_OFFSET
pub extern fn muentry_alloc_slow(mutator: *mut ImmixMutatorLocal, size: usize, align: usize) -> ObjectReference {
    let ret = unsafe {&mut *mutator}.try_alloc_from_local(size, align);
    trace!("muentry_alloc_slow(mutator: {:?}, size: {}, align: {}) = {}", mutator, size, align, ret);

    unsafe {ret.to_object_reference()}
}

#[no_mangle]
/// size doesn't include HEADER_SIZE, return value is offset by HEADER_OFFSET
pub extern fn muentry_alloc_large(mutator: *mut ImmixMutatorLocal, size: usize, align: usize) -> ObjectReference {
    let ret = freelist::alloc_large(size, align, unsafe {mutator.as_mut().unwrap()}, MY_GC.read().unwrap().as_ref().unwrap().lo_space.clone());
    trace!("muentry_alloc_large(mutator: {:?}, size: {}, align: {}) = {}", mutator, size, align, ret);

    unsafe {ret.to_object_reference()}
}

// force gc
#[no_mangle]
pub extern fn force_gc() {
    heap::gc::trigger_gc();
}

// dump heap
#[no_mangle]
pub extern fn persist_heap(roots: Vec<Address>) -> objectdump::HeapDump {
    objectdump::HeapDump::from_roots(roots)
}
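
// A minimal usage sketch of this API (not part of the original source): the sizes, the
// thread count, and the type id below are illustrative assumptions, and it presumes a
// GCType with id 0 has already been registered via add_gc_type.
//
//     gc_init(1 << 24, 1 << 24, 8, true);      // 16 MB immix space, 16 MB large object space, 8 GC threads
//     let mut mutator = new_mutator();
//     let obj = alloc(&mut mutator, 24, 8);    // allocate 24 bytes, 8-byte aligned
//     muentry_init_object(&mut mutator, obj, get_gc_type_encode(0));
//     add_to_root(obj);
//     force_gc();
//     drop_mutator(&mut mutator);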