// lib.rs
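//! A C-callable interface to a garbage collector that pairs an Immix space
//! for small objects with a free-list space for large objects.
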
extern crate utils;
#[macro_use]
extern crate lazy_static;
#[macro_use]
extern crate log;
extern crate simple_logger;
extern crate aligned_alloc;
extern crate crossbeam;

use std::sync::atomic::Ordering;

pub mod common;
pub mod objectmodel;
pub mod heap;

use utils::ObjectReference;
use heap::immix::BYTES_IN_LINE;
use heap::immix::ImmixSpace;
use heap::immix::ImmixMutatorLocal;
use heap::freelist;
use heap::freelist::FreeListSpace;

use std::fmt;
use std::sync::Arc;
use std::sync::RwLock;

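/// Objects of at least one Immix line in size are expected to take the
/// large-object allocation path (`muentry_alloc_large` below).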
pub const LARGE_OBJECT_THRESHOLD : usize = BYTES_IN_LINE;

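// Re-export the mutator type and the byte offsets of its cursor/limit fields;
// the offsets presumably allow client compilers to emit the bump-pointer
// allocation fast path inline instead of calling `alloc`.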
pub use heap::immix::ImmixMutatorLocal as Mutator;
pub use heap::immix::CURSOR_OFFSET as ALLOCATOR_CURSOR_OFFSET;
pub use heap::immix::LIMIT_OFFSET as ALLOCATOR_LIMIT_OFFSET;

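/// Global GC state: the Immix space for ordinary objects and a free-list
/// space for large objects.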
#[repr(C)]
pub struct GC {
    immix_space: Arc<ImmixSpace>,
    lo_space   : Arc<RwLock<FreeListSpace>>
}

impl fmt::Debug for GC {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "GC\n").unwrap();
        write!(f, "{}", self.immix_space).unwrap();
        
        let lo_lock = self.lo_space.read().unwrap();
        write!(f, "{}", *lo_lock)
    }
}

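// The single global GC instance; `None` until `gc_init` runs.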
lazy_static! {
    pub static ref MY_GC : RwLock<Option<GC>> = RwLock::new(None);
}

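/// Prints the state of both spaces via the `Debug` impl above. Panics if
/// `gc_init` has not been called.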
#[no_mangle]
pub extern fn gc_stats() {
    println!("{:?}", MY_GC.read().unwrap().as_ref().unwrap());
}

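/// Returns shared handles to the Immix and large-object spaces. Panics if
/// `gc_init` has not been called.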
#[no_mangle]
pub extern fn get_spaces() -> (Arc<ImmixSpace>, Arc<RwLock<FreeListSpace>>) {
    let space_lock = MY_GC.read().unwrap();
    let space = space_lock.as_ref().unwrap();
    
    (space.immix_space.clone(), space.lo_space.clone())
}

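/// Initialises the collector: records the requested space sizes, creates the
/// Immix and large-object spaces, registers them with the GC, sets the number
/// of GC threads, and initialises the object model.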
#[no_mangle]
pub extern fn gc_init(immix_size: usize, lo_size: usize, n_gcthreads: usize) {
    // uncomment the line below to enable logging at the given level
//    simple_logger::init_with_level(log::LogLevel::Trace).ok();
    
    // init space size
    heap::IMMIX_SPACE_SIZE.store(immix_size, Ordering::SeqCst);
    heap::LO_SPACE_SIZE.store(lo_size, Ordering::SeqCst);
    
    let (immix_space, lo_space) = {
        let immix_space = Arc::new(ImmixSpace::new(immix_size));
        let lo_space    = Arc::new(RwLock::new(FreeListSpace::new(lo_size)));

        heap::gc::init(immix_space.clone(), lo_space.clone());        
        
        (immix_space, lo_space)
    };
    
    *MY_GC.write().unwrap() = Some(GC {immix_space: immix_space, lo_space: lo_space});
    info!("heap is {} bytes (immix: {} bytes, lo: {} bytes) . ", immix_size + lo_size, immix_size, lo_size);
    
    // gc threads
    heap::gc::GC_THREADS.store(n_gcthreads, Ordering::SeqCst);
    info!("{} gc threads", n_gcthreads);
    
    // init object model
    objectmodel::init();
}

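/// Creates a mutator bound to the global Immix space, presumably one per
/// mutator thread. Panics if `gc_init` has not been called.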
#[no_mangle]
pub extern fn new_mutator() -> ImmixMutatorLocal {
    ImmixMutatorLocal::new(MY_GC.read().unwrap().as_ref().unwrap().immix_space.clone())
}

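/// Destroys a mutator previously obtained from `new_mutator`.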
#[no_mangle]
#[allow(unused_variables)]
pub extern fn drop_mutator(mutator: *mut ImmixMutatorLocal) {
    unsafe {mutator.as_mut().unwrap()}.destroy();
    
    // Rust will reclaim the boxed mutator
}

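// Provided by a platform-specific C support library; presumably records the
// current stack pointer as the "low water mark" so the collector knows how
// far to scan the stack.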
#[cfg(target_arch = "x86_64")]
#[link(name = "gc_clib_x64")]
extern "C" {
    pub fn set_low_water_mark();
}

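/// Fast-path safepoint check; `#[inline(always)]` keeps the common
/// no-collection case cheap.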
#[no_mangle]
#[inline(always)]
pub extern fn yieldpoint(mutator: *mut ImmixMutatorLocal) {
    unsafe {mutator.as_mut().unwrap()}.yieldpoint();
}

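/// Out-of-line slow path, presumably taken when the yieldpoint check fires.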
#[no_mangle]
#[inline(never)]
pub extern fn yieldpoint_slow(mutator: *mut ImmixMutatorLocal) {
    unsafe {mutator.as_mut().unwrap()}.yieldpoint_slow()
}

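/// Fast-path allocation from the mutator's local Immix allocation buffer.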
#[no_mangle]
#[inline(always)]
pub extern fn alloc(mutator: *mut ImmixMutatorLocal, size: usize, align: usize) -> ObjectReference {
    let addr = unsafe {mutator.as_mut().unwrap()}.alloc(size, align);
    unsafe {addr.to_object_reference()}
}

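/// Slow path, taken when the inline fast path cannot satisfy the request;
/// retries allocation through the mutator's local state.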
#[no_mangle]
#[inline(never)]
pub extern fn muentry_alloc_slow(mutator: *mut ImmixMutatorLocal, size: usize, align: usize) -> ObjectReference {
    trace!("muentry_alloc_slow(mutator: {:?}, size: {}, align: {})", mutator, size, align);
    let ret = unsafe {mutator.as_mut().unwrap()}.try_alloc_from_local(size, align);
    unsafe {ret.to_object_reference()}
}

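/// Allocates in the free-list large object space.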
#[no_mangle]
pub extern fn muentry_alloc_large(mutator: *mut ImmixMutatorLocal, size: usize, align: usize) -> ObjectReference {
    trace!("muentry_alloc_large(mutator: {:?}, size: {}, align: {})", mutator, size, align);
    let ret = freelist::alloc_large(size, align, unsafe {mutator.as_mut().unwrap()}, MY_GC.read().unwrap().as_ref().unwrap().lo_space.clone());
    unsafe {ret.to_object_reference()}
}
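
// A minimal usage sketch from the Rust side (the heap sizes, object size and
// alignment, and thread count below are illustrative, not taken from this file):
//
//     gc_init(1 << 24, 1 << 24, 1);         // 16 MiB immix space, 16 MiB lo space
//     let mut mutator = new_mutator();
//     let obj = alloc(&mut mutator, 16, 8); // fast-path bump allocation
//     yieldpoint(&mut mutator);             // poll for a pending collection
//     drop_mutator(&mut mutator);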