immix_mutator.rs

use heap::immix;
use heap::immix::ImmixSpace;
use heap::immix::immix_space::ImmixBlock;
use heap::gc;
use objectmodel;

use utils::LOG_POINTER_SIZE;
use utils::Address;

use std::mem;
use std::fmt;
use std::sync::Arc;
use std::sync::RwLock;
use std::sync::atomic::{AtomicBool, Ordering};

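// Global registry of mutators: a fixed-capacity slot table, indexed by
// mutator id, that exposes each thread's ImmixMutatorGlobal (its yield and
// blocked flags) to the rest of the GC.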
const MAX_MUTATORS : usize = 1024;
lazy_static! {
    pub static ref MUTATORS : RwLock<Vec<Option<Arc<ImmixMutatorGlobal>>>> = {
        let mut ret = Vec::with_capacity(MAX_MUTATORS);
        for _ in 0..MAX_MUTATORS {
            ret.push(None);
        }
        RwLock::new(ret)
    };
    
    pub static ref N_MUTATORS : RwLock<usize> = RwLock::new(0);
}

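// The allocation fast path reads `cursor` and `limit` directly out of this
// struct at fixed byte offsets (see CURSOR_OFFSET/LIMIT_OFFSET below), hence
// #[repr(C)] and the layout warning.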
#[repr(C)]
// do not change the layout unless the field offsets below (CURSOR_OFFSET/LIMIT_OFFSET) are updated correspondingly
pub struct ImmixMutatorLocal {
    id        : usize,
    
    // use raw pointers here instead of AddressMapTable
    // to avoid an extra indirection on the fast path
    alloc_map : *mut u8,
    trace_map : *mut u8,
    space_start: Address,
    
    // cursor might be invalid, but Option<Address> is too expensive here;
    // after every GC we reset both cursor and limit to Address::zero(),
    // so that alloc() branches to the slow path
    cursor    : Address,
    limit     : Address,
    line      : usize,
    
    // globally accessible per-thread fields
    pub global    : Arc<ImmixMutatorGlobal>,
    
    space     : Arc<ImmixSpace>,
    block     : Option<Box<ImmixBlock>>,

    mark_state: u8
}

lazy_static! {
    // byte offsets of `cursor` and `limit` within the #[repr(C)] struct above;
    // the fields preceding `cursor` are id, alloc_map, trace_map and space_start
    pub static ref CURSOR_OFFSET : usize = mem::size_of::<usize>()
                + mem::size_of::<*mut u8>()
                + mem::size_of::<*mut u8>()
                + mem::size_of::<Address>();

    pub static ref LIMIT_OFFSET : usize = *CURSOR_OFFSET
                + mem::size_of::<Address>();
}

pub struct ImmixMutatorGlobal {
    take_yield : AtomicBool,
    still_blocked : AtomicBool
}

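// A sketch of the intended lifecycle (the ImmixSpace construction and the
// header `encode` value are elided here; both are defined elsewhere in this
// crate):
//
//     let mut mutator = ImmixMutatorLocal::new(space.clone());
//     let obj = mutator.alloc(24, 8);    // bump-pointer fast path
//     mutator.init_object(obj, encode);  // write object metadata
//     mutator.yieldpoint();              // cooperate with the collector
//     mutator.destroy();                 // return block, free registry slot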
impl ImmixMutatorLocal {
    pub fn reset(&mut self) {
        unsafe {
            // Address::zero() is the "not initialized" sentinel; zeroing
            // cursor and limit forces the next alloc() onto the slow path
            self.cursor = Address::zero();
            self.limit = Address::zero();
        }
        self.line = immix::LINES_IN_BLOCK;

        self.block = None;
    }

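    // After a collection, flip the expected mark bit rather than clearing
    // per-object marks: anything still carrying the old state counts as
    // unmarked in the next cycle.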
    pub fn reset_after_gc(&mut self) {
        self.reset();
        self.mark_state ^= 1;
    }
    
    pub fn new(space : Arc<ImmixSpace>) -> ImmixMutatorLocal {
        let global = Arc::new(ImmixMutatorGlobal::new());
        
        let mut id_lock = N_MUTATORS.write().unwrap();
        {
            let mut mutators_lock = MUTATORS.write().unwrap();
            mutators_lock[*id_lock] = Some(global.clone());
        }
        
        let ret = ImmixMutatorLocal {
            id : *id_lock,
            cursor: unsafe { Address::zero() },
            limit: unsafe { Address::zero() },
            line: immix::LINES_IN_BLOCK,
            block: None,
            alloc_map: space.alloc_map.ptr,
            trace_map: space.trace_map.ptr,
            space_start: space.start(),
            global: global,
            space: space,
            mark_state: objectmodel::INIT_MARK_STATE as u8
        };
        *id_lock += 1;
        
        ret
    }
    
    pub fn destroy(&mut self) {
        self.return_block();
        
        let mut mutator_count_lock = N_MUTATORS.write().unwrap();
        
        let mut mutators_lock = MUTATORS.write().unwrap();
        mutators_lock[self.id] = None;
        
        *mutator_count_lock -= 1;
        
        if cfg!(debug_assertions) {
            debug!("destroy mutator. Now live mutators = {}", *mutator_count_lock);
        }
    }
    
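    // Yieldpoint protocol: the collector sets take_yield on every registered
    // mutator; at its next poll the mutator falls into the slow path and
    // parks in gc::sync_barrier() until the collection cycle completes.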
    #[inline(always)]
    pub fn yieldpoint(&mut self) {
        if self.global.take_yield() {
            self.yieldpoint_slow();
        }
    }
    
    #[inline(never)]
    pub fn yieldpoint_slow(&mut self) {
        trace!("Mutator{}: yieldpoint triggered, slow path", self.id);
        gc::sync_barrier(self);
    }
    
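    // Fast-path bump allocation: align the cursor, advance it by `size`, and
    // fall into try_alloc_from_local() only when the bump would cross the
    // limit. The result is shifted by OBJECT_HEADER_OFFSET so it points at
    // the object rather than its header.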
    #[inline(always)]
    pub fn alloc(&mut self, size: usize, align: usize) -> Address {
        // the object header must be added to the size before computing `end`,
        // otherwise the limit check is made against a size that is too small
        let size = size + objectmodel::OBJECT_HEADER_SIZE;

        let start = self.cursor.align_up(align);
        let end = start.plus(size);

        if end > self.limit {
            let ret = self.try_alloc_from_local(size, align);
            
            if cfg!(debug_assertions) {
                if !ret.is_aligned_to(align) {
                    use std::process;
                    println!("wrong alignment on 0x{:x}, expected align: {}", ret, align);
                    process::exit(102);
                }
            }
            
            ret.offset(-objectmodel::OBJECT_HEADER_OFFSET)
        } else {
            if cfg!(debug_assertions) {
                if !start.is_aligned_to(align) {
                    use std::process;
                    println!("wrong alignment on 0x{:x}, expected align: {}", start, align);
                    process::exit(102);
                }
            }
            self.cursor = end;
            
            start.offset(-objectmodel::OBJECT_HEADER_OFFSET)
        } 
    }
    
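    // Object metadata is written in one of two ways, selected by the
    // `use-sidemap` feature: into the per-space alloc/trace side tables, or
    // into the header word just before the object itself.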
    #[inline(always)]
    #[cfg(feature = "use-sidemap")]
    pub fn init_object(&mut self, addr: Address, encode: u64) {
        unsafe {
            *self.alloc_map.offset((addr.diff(self.space_start) >> LOG_POINTER_SIZE) as isize) = encode as u8;
            objectmodel::mark_as_untraced(self.trace_map, self.space_start, addr, self.mark_state);
        }
    }

    #[inline(always)]
    #[cfg(not(feature = "use-sidemap"))]
    pub fn init_object(&mut self, addr: Address, encode: u64) {
        unsafe {
            addr.offset(objectmodel::OBJECT_HEADER_OFFSET).store(encode);
        }
    }
    
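    // Medium path: recycle free lines within the current block. Immix marks
    // memory at line granularity, so we look for the next run of available
    // lines, zero it, mark it as freshly allocated, and retry the bump
    // allocation inside that run.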
    #[inline(never)]
    pub fn try_alloc_from_local(&mut self, size : usize, align: usize) -> Address {
        if self.line < immix::LINES_IN_BLOCK {
            let opt_next_available_line = {
                let cur_line = self.line;
                self.block().get_next_available_line(cur_line)
            };
    
            match opt_next_available_line {
                Some(next_available_line) => {
                    // we can alloc from local blocks
                    let end_line = self.block().get_next_unavailable_line(next_available_line);
                    self.cursor = self.block().start().plus(next_available_line << immix::LOG_BYTES_IN_LINE);
                    self.limit  = self.block().start().plus(end_line << immix::LOG_BYTES_IN_LINE);
                    self.line   = end_line;
                    
                    self.cursor.memset(0, self.limit.diff(self.cursor));
                    
                    for line in next_available_line..end_line {
                        self.block().line_mark_table_mut().set(line, immix::LineMark::FreshAlloc);
                    }
                    
                    self.alloc(size, align)
                },
                None => {
                    self.alloc_from_global(size, align)
                }
            }
        } else {
            // we need to alloc from global space
            self.alloc_from_global(size, align)
        }
    }
    
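    // Slow path: hand the exhausted block back to the space, then loop
    // (polling the yieldpoint) until get_next_usable_block() returns a
    // fresh block to install as the new allocation region.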
    fn alloc_from_global(&mut self, size: usize, align: usize) -> Address {
        trace!("Mutator{}: slowpath: alloc_from_global", self.id);
        
        self.return_block();

        loop {
            // check if yield
            self.yieldpoint();
            
            let new_block : Option<Box<ImmixBlock>> = self.space.get_next_usable_block();
            
            match new_block {
                Some(mut b) => {
                    // zero the block
                    b.lazy_zeroing();

                    self.block    = Some(b);
                    self.cursor   = self.block().start();
                    self.limit    = self.block().start();
                    self.line     = 0;
                    
                    return self.alloc(size, align);
                },
                // no usable block yet: loop back to the yieldpoint until the
                // collector makes more blocks available
                None => continue
            }
        }
    }
    
    pub fn prepare_for_gc(&mut self) {
        self.return_block();
    }
    
    pub fn id(&self) -> usize {
        self.id
    }

    fn return_block(&mut self) {
        if self.block.is_some() {
            trace!("finishing block {:?}", self.block.as_ref().unwrap());
            self.space.return_used_block(self.block.take().unwrap());
        }        
    }

    fn block(&mut self) -> &mut ImmixBlock {
        self.block.as_mut().unwrap()
    }
    
    pub fn print_object(&self, obj: Address, length: usize) {
        ImmixMutatorLocal::print_object_static(obj, length);
    }
    
    pub fn print_object_static(obj: Address, length: usize) {
        debug!("===Object {:#X} size: {} bytes===", obj, length);
        let mut cur_addr = obj;
        while cur_addr < obj.plus(length) {
            debug!("Address: {:#X}   {:#X}", cur_addr, unsafe {cur_addr.load::<u64>()});
            cur_addr = cur_addr.plus(8);
        }
        debug!("----");
        debug!("=========");        
    }
}

impl ImmixMutatorGlobal {
    pub fn new() -> ImmixMutatorGlobal {
        ImmixMutatorGlobal {
            take_yield: AtomicBool::new(false),
            still_blocked: AtomicBool::new(false)
        }
    }
    
    #[inline(always)]
    pub fn is_still_blocked(&self) -> bool {
        self.still_blocked.load(Ordering::SeqCst)
    }
    pub fn set_still_blocked(&self, b : bool) {
        self.still_blocked.store(b, Ordering::SeqCst);
    }
    
    pub fn set_take_yield(&self, b : bool) {
        self.take_yield.store(b, Ordering::SeqCst);
    }
    #[inline(always)]
    pub fn take_yield(&self) -> bool {
        self.take_yield.load(Ordering::SeqCst)
    }
}

impl fmt::Display for ImmixMutatorLocal {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        if self.cursor.is_zero() {
            write!(f, "Mutator (not initialized)")
        } else {
            write!(f, "Mutator:\n").unwrap();
            write!(f, "cursor= {:#X}\n", self.cursor).unwrap();
            write!(f, "limit = {:#X}\n", self.limit).unwrap();
            write!(f, "line  = {}\n", self.line).unwrap();
            write!(f, "block = {}", self.block.as_ref().unwrap())
        }
    }
}