// Copyright 2017 The Australian National University
// 
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// 
//     http://www.apache.org/licenses/LICENSE-2.0
// 
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
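
// This file implements the mutator side of the Immix GC: each mutator thread owns
// an ImmixMutatorLocal and bump-allocates out of a private ImmixBlock (cursor/limit),
// falling back to a slow path that reserves free lines in the current block or fetches
// a fresh block from the shared ImmixSpace. A rough usage sketch (constructing the
// ImmixSpace itself happens elsewhere):
//
//     let mut mutator = ImmixMutatorLocal::new(space.clone());
//     let obj = mutator.alloc(size, align);
//     mutator.init_object(obj, encoded_header);
//     mutator.yieldpoint(); // polled regularly so the GC can stop this thread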

use heap::immix;
use heap::immix::ImmixSpace;
use heap::immix::immix_space::ImmixBlock;
use heap::gc;
use objectmodel;
use utils::Address;
use utils::ByteSize;

use std::*;
use std::sync::Arc;
use std::sync::RwLock;
use std::sync::atomic::{AtomicBool, Ordering};

const MAX_MUTATORS : usize = 1024;
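// MUTATORS keeps one slot per mutator id (up to MAX_MUTATORS) so the GC can reach
// every thread's ImmixMutatorGlobal; N_MUTATORS counts live mutators and is also
// used as the id for the next mutator created in ImmixMutatorLocal::new().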
lazy_static! {
    pub static ref MUTATORS : RwLock<Vec<Option<Arc<ImmixMutatorGlobal>>>> = {
        let mut ret = Vec::with_capacity(MAX_MUTATORS);
        for _ in 0..MAX_MUTATORS {
            ret.push(None);
        }
        RwLock::new(ret)
    };
    
    pub static ref N_MUTATORS : RwLock<usize> = RwLock::new(0);
}

const TRACE_ALLOC_FASTPATH : bool = true;

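// Thread-local allocation state. #[repr(C)] keeps the field layout fixed, presumably
// so that compiled code can reach cursor/limit through the CURSOR_OFFSET/LIMIT_OFFSET
// values exported below.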
#[repr(C)]
pub struct ImmixMutatorLocal {
    id        : usize,
    
    // use raw pointer here instead of AddressMapTable
    // to avoid indirection in fast path    
    alloc_map : *mut u8,
    trace_map : *mut u8,
    space_start: Address,
    
    // cursor might be invalid, but Option<Address> is expensive here
    // after every GC, we set both cursor and limit
    // to Address::zero() so that alloc will branch to slow path    
    cursor    : Address,
    limit     : Address,
    line      : usize,
    
    // globally accessible per-thread fields
    pub global    : Arc<ImmixMutatorGlobal>,
    
    space     : Arc<ImmixSpace>,
    block     : Option<Box<ImmixBlock>>,

    mark_state: u8
}

lazy_static! {
    pub static ref CURSOR_OFFSET : usize = offset_of!(ImmixMutatorLocal=>cursor).get_byte_offset();
    pub static ref LIMIT_OFFSET  : usize = offset_of!(ImmixMutatorLocal=>limit).get_byte_offset();
}

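// Per-mutator flags shared with the GC: take_yield asks the mutator to stop at its
// next yieldpoint; still_blocked is intended for the GC synchronisation code
// (see heap::gc) to track whether the thread is still blocked.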
pub struct ImmixMutatorGlobal {
    take_yield : AtomicBool,
    still_blocked : AtomicBool
}

impl ImmixMutatorLocal {
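    // Invalidate the allocation state: cursor and limit become zero so the next
    // alloc() misses the fast path, and the current block is dropped.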
    pub fn reset(&mut self) {
        unsafe {
            // should not use Address::zero() other than initialization
            self.cursor = Address::zero();
            self.limit = Address::zero();
        }
        self.line = immix::LINES_IN_BLOCK;
        
        self.block = None;
    }

    pub fn reset_after_gc(&mut self) {
        self.reset();
        self.mark_state ^= 1;
    }
    
    pub fn new(space : Arc<ImmixSpace>) -> ImmixMutatorLocal {
        let global = Arc::new(ImmixMutatorGlobal::new());
        
        let mut id_lock = N_MUTATORS.write().unwrap();
        {
            let mut mutators_lock = MUTATORS.write().unwrap();
            mutators_lock.remove(*id_lock);
            mutators_lock.insert(*id_lock, Some(global.clone()));
        }
        
        let ret = ImmixMutatorLocal {
            id : *id_lock,
            cursor: unsafe { Address::zero() },
            limit: unsafe { Address::zero() },
            line: immix::LINES_IN_BLOCK,
            block: None,
            alloc_map: space.alloc_map.ptr,
            trace_map: space.trace_map.ptr,
            space_start: space.start(),
            global: global,
            space: space,
            mark_state: objectmodel::INIT_MARK_STATE as u8
        };
        *id_lock += 1;
        
        ret
    }
    
    pub fn destroy(&mut self) {
        {
            self.return_block();
        }
        
        let mut mutator_count_lock = N_MUTATORS.write().unwrap();
        
        let mut mutators_lock = MUTATORS.write().unwrap();
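        // clear this mutator's slot without shifting the other entries:
        // push a None and swap it into position self.id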
        mutators_lock.push(None);
        mutators_lock.swap_remove(self.id);
        
        *mutator_count_lock = *mutator_count_lock - 1;
        
        if cfg!(debug_assertions) {
            debug!("destroy mutator. Now live mutators = {}", *mutator_count_lock);
        }
    }
    
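    // Cheap check meant to be inlined at yieldpoints; only takes the slow path
    // (blocking in gc::sync_barrier) when the GC has set take_yield.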
    #[inline(always)]
    pub fn yieldpoint(&mut self) {
        if self.global.take_yield() {
            self.yieldpoint_slow();
        }
    }
    
    #[inline(never)]
    pub fn yieldpoint_slow(&mut self) {
        trace!("Mutator{}: yieldpoint triggered, slow path", self.id);
        gc::sync_barrier(self);
    }
    
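    // Bump-pointer fast path: align the cursor up and, if the object fits below
    // limit, just advance the cursor; otherwise fall into try_alloc_from_local().
    // The returned address is adjusted by -OBJECT_HEADER_OFFSET so that the header
    // written by init_object() lands at the start of the raw allocation.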
    #[inline(always)]
    pub fn alloc(&mut self, size: usize, align: usize) -> Address {
        // this part of the code slows down allocation
        let align = objectmodel::check_alignment(align);
        let size = size + objectmodel::OBJECT_HEADER_SIZE;
        // end

        if TRACE_ALLOC_FASTPATH {
            trace!("Mutator{}: fastpath alloc: size={}, align={}", self.id, size, align);
        }

        let start = self.cursor.align_up(align);
        let end = start + size;

        if TRACE_ALLOC_FASTPATH {
            trace!("Mutator{}: fastpath alloc: start=0x{:x}, end=0x{:x}", self.id, start, end);
        }

        if end > self.limit {
            let ret = self.try_alloc_from_local(size, align);
            if TRACE_ALLOC_FASTPATH {
                trace!("Mutator{}: fastpath alloc: try_alloc_from_local()=0x{:x}", self.id, ret);
            }
            
            if cfg!(debug_assertions) {
                if !ret.is_aligned_to(align) {
                    use std::process;
                    println!("wrong alignment on 0x{:x}, expected align: {}", ret, align);
                    process::exit(102);
                }
            }

            // this offset adjustment should also be removed (for performance)
            ret + (-objectmodel::OBJECT_HEADER_OFFSET)
        } else {
            if cfg!(debug_assertions) {
                if !start.is_aligned_to(align) {
                    use std::process;
                    println!("wrong alignment on 0x{:x}, expected align: {}", start, align);
                    process::exit(102);
                }
            }
            self.cursor = end;
            
            start + (-objectmodel::OBJECT_HEADER_OFFSET)
        } 
    }
    
    #[inline(always)]
    #[cfg(feature = "use-sidemap")]
    pub fn init_object(&mut self, addr: Address, encode: u64) {
//        unsafe {
//            *self.alloc_map.offset((addr.diff(self.space_start) >> LOG_POINTER_SIZE) as isize) = encode as u8;
//            objectmodel::mark_as_untraced(self.trace_map, self.space_start, addr, self.mark_state);
//        }

        unimplemented!()
    }
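
    // Without the side map, the encoded header word is stored in the object itself,
    // at OBJECT_HEADER_OFFSET from the reference handed out by alloc().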
    #[inline(always)]
    #[cfg(not(feature = "use-sidemap"))]
    pub fn init_object(&mut self, addr: Address, encode: u64) {
        unsafe {
            (addr + objectmodel::OBJECT_HEADER_OFFSET).store(encode);
        }
    }

    #[inline(always)]
    #[cfg(feature = "use-sidemap")]
    pub fn init_hybrid(&mut self, addr: Address, encode: u64, len: u64) {
        unimplemented!()
    }
    #[inline(always)]
    #[cfg(not(feature = "use-sidemap"))]
    pub fn init_hybrid(&mut self, addr: Address, encode: u64, len: u64) {
        let encode = encode | ((len << objectmodel::SHR_HYBRID_LENGTH) & objectmodel::MASK_HYBRID_LENGTH);
        unsafe {
            (addr + objectmodel::OBJECT_HEADER_OFFSET).store(encode);
        }
    }
    
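    // Slow path that stays within the current block: find the next run of free lines
    // in the block's line mark table, zero it, mark those lines as FreshAlloc and
    // bump-allocate from there; if the block has no free lines left, go to
    // alloc_from_global().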
    #[inline(never)]
    pub fn try_alloc_from_local(&mut self, size : usize, align: usize) -> Address {
        if self.line < immix::LINES_IN_BLOCK {
            let opt_next_available_line = {
                let cur_line = self.line;
                self.block().get_next_available_line(cur_line)
            };
    
            match opt_next_available_line {
                Some(next_available_line) => {
                    // we can alloc from local blocks
                    let end_line = self.block().get_next_unavailable_line(next_available_line);

                    self.cursor = self.block().start() + (next_available_line << immix::LOG_BYTES_IN_LINE);
                    self.limit  = self.block().start() + (end_line << immix::LOG_BYTES_IN_LINE);
                    self.line   = end_line;
                    
                    unsafe {self.cursor.memset(0, self.limit - self.cursor);}
                    
                    for line in next_available_line..end_line {
                        self.block().line_mark_table_mut().set(line, immix::LineMark::FreshAlloc);
                    }

                    // allocate fast path
                    let start = self.cursor.align_up(align);
                    let end = start + size;

                    self.cursor = end;
                    start
                },
                None => {
                    self.alloc_from_global(size, align)
                }
            }
        } else {
            // we need to alloc from global space
            self.alloc_from_global(size, align)
        }
    }
    
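    // Slowest path: hand the current block back to the space, then loop asking the
    // space for a usable block, hitting the yieldpoint on every iteration so this
    // thread can still be stopped for GC while it waits.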
    fn alloc_from_global(&mut self, size: usize, align: usize) -> Address {
        trace!("Mutator{}: slowpath: alloc_from_global", self.id);
        
        self.return_block();

        loop {
            // check if yield
            self.yieldpoint();
            
            let new_block : Option<Box<ImmixBlock>> = self.space.get_next_usable_block();
            
            match new_block {
                Some(b) => {
                    // no need to zero the whole block here:
                    // we zero lines as they get used in try_alloc_from_local()
//                    b.lazy_zeroing();

                    self.block    = Some(b);
                    self.cursor   = self.block().start();
                    self.limit    = self.block().start();
                    self.line     = 0;

                    trace!("Mutator{}: slowpath: new block starting from 0x{:x}", self.id, self.cursor);

                    return self.try_alloc_from_local(size, align);
                },
                None => { continue; }
            }
        }
    }
    
    pub fn prepare_for_gc(&mut self) {
        self.return_block();
    }
    
    pub fn id(&self) -> usize {
        self.id
    }

    fn return_block(&mut self) {
        if self.block.is_some() {
            trace!("finishing block {:?}", self.block.as_ref().unwrap());

            if cfg!(debug_assertions) {
                let block = self.block.as_ref().unwrap();
                ImmixMutatorLocal::sanity_check_finished_block(block);
            }

            self.space.return_used_block(self.block.take().unwrap());
        }        
    }

    #[cfg(feature = "use-sidemap")]
    #[allow(unused_variables)]
    fn sanity_check_finished_block(block: &ImmixBlock) {

    }

    #[cfg(not(feature = "use-sidemap"))]
    #[allow(unused_variables)]
    fn sanity_check_finished_block(block: &ImmixBlock) {

    }

    fn block(&mut self) -> &mut ImmixBlock {
        self.block.as_mut().unwrap()
    }
    
    pub fn print_object(&self, obj: Address, length: usize) {
        ImmixMutatorLocal::print_object_static(obj, length);
    }
    
    pub fn print_object_static(obj: Address, length: usize) {
        debug!("===Object {:#X} size: {} bytes===", obj, length);
        let mut cur_addr = obj;
        while cur_addr < obj + length {
            debug!("Address: {:#X}   {:#X}", cur_addr, unsafe {cur_addr.load::<u64>()});
            cur_addr = cur_addr + 8 as ByteSize;
        }
        debug!("----");
        debug!("=========");        
    }
}

impl ImmixMutatorGlobal {
    pub fn new() -> ImmixMutatorGlobal {
        ImmixMutatorGlobal {
            take_yield: AtomicBool::new(false),
            still_blocked: AtomicBool::new(false)
        }
    }
    
    #[inline(always)]
    pub fn is_still_blocked(&self) -> bool {
        self.still_blocked.load(Ordering::SeqCst)
    }
    pub fn set_still_blocked(&self, b : bool) {
        self.still_blocked.store(b, Ordering::SeqCst);
    }
    
    pub fn set_take_yield(&self, b : bool) {
        self.take_yield.store(b, Ordering::SeqCst);
    }
    #[inline(always)]
    pub fn take_yield(&self) -> bool {
        self.take_yield.load(Ordering::SeqCst)
    }
}

impl fmt::Display for ImmixMutatorLocal {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        if self.cursor.is_zero() {
            write!(f, "Mutator (not initialized)")
        } else {
            writeln!(f, "Mutator:")?;
            writeln!(f, "cursor= {:#X}", self.cursor)?;
            writeln!(f, "limit = {:#X}", self.limit)?;
            writeln!(f, "line  = {}", self.line)?;
            write!(f, "block = {}", self.block.as_ref().unwrap())
        }
    }
}