// Copyright 2017 The Australian National University
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

use heap::Mutator;
use heap::immix::*;
use heap::immix::ImmixSpace;
use heap::immix::immix_space::ImmixBlock;
use objectmodel;
use utils::Address;
use utils::ByteSize;

use std::*;

// When true, the `trace_if!` calls in the allocation paths emit trace logging.
const TRACE_ALLOC: bool = true;
/// Thread-local bump-pointer allocator for an Immix space.
///
/// `#[repr(C)]` guarantees a stable field layout: `CURSOR_OFFSET` and
/// `LIMIT_OFFSET` (computed below via `offset_of!`) are exposed so that
/// generated code can perform the fast-path allocation directly on the
/// `cursor`/`limit` fields.
#[repr(C)]
pub struct ImmixAllocator {
    // cursor might be invalid, but Option<Address> is expensive here
    // after every GC, we set both cursor and limit
    // to Address::zero() so that alloc will branch to slow path
    cursor: Address,
    limit: Address,
    // next line index to search from in the current block; set to
    // LINES_IN_BLOCK when there is no usable local line (see reset()/new())
    line: usize,
    // block currently being bump-allocated into; None when unset/returned
    block: Option<Raw<ImmixBlock>>,

    // separate cursor/limit/block for overflow ("large") allocations,
    // i.e. requests bigger than a line (see alloc_slow()/overflow_alloc())
    large_cursor: Address,
    large_limit: Address,
    large_block: Option<Raw<ImmixBlock>>,

    // the space blocks are acquired from and returned to
    space: Raw<ImmixSpace>,
    // backlink to the owning mutator, used for yieldpoint() in the slow path;
    // set after construction via set_mutator()
    mutator: *mut Mutator
}
lazy_static! {
    // Byte offsets of the fast-path fields inside ImmixAllocator, exported for
    // code that accesses cursor/limit directly (relies on #[repr(C)] layout).
    pub static ref CURSOR_OFFSET : usize = offset_of!(ImmixAllocator=>cursor).get_byte_offset();
    pub static ref LIMIT_OFFSET  : usize = offset_of!(ImmixAllocator=>limit).get_byte_offset();
}
impl ImmixAllocator {
    /// Clears all allocation state. Cursors/limits go to zero so the next
    /// `alloc()` fails the limit check and takes the slow path; both blocks
    /// are dropped without being returned to the space (callers that need
    /// the blocks returned use `prepare_for_gc()`/`destroy()` first).
    pub fn reset(&mut self) -> () {
        unsafe {
            // should not use Address::zero() other than initialization
            self.cursor = Address::zero();
            self.limit = Address::zero();
            self.large_cursor = Address::zero();
            self.large_limit = Address::zero();
        }
        // LINES_IN_BLOCK means "no usable line in the local block"
        self.line = LINES_IN_BLOCK;
        self.block = None;
        self.large_block = None;
    }

    /// Post-GC hook: currently identical to `reset()`.
    pub fn reset_after_gc(&mut self) {
        self.reset();
    }

    /// Creates an allocator for `space` with no blocks and zeroed cursors,
    /// so the first allocation goes straight to the slow path.
    /// `mutator` starts null — `set_mutator()` must be called before any
    /// allocation reaches `alloc_from_global()` (which dereferences it).
    pub fn new(space: Raw<ImmixSpace>) -> ImmixAllocator {
        ImmixAllocator {
            cursor: unsafe { Address::zero() },
            limit: unsafe { Address::zero() },
            line: LINES_IN_BLOCK,
            block: None,
            large_cursor: unsafe { Address::zero() },
            large_limit: unsafe { Address::zero() },
            large_block: None,
            space,
            mutator: ptr::null_mut()
        }
    }

    /// Installs the backlink to the owning mutator (used for yieldpoints).
    pub fn set_mutator(&mut self, mutator: *mut Mutator) {
        self.mutator = mutator;
    }

    /// Returns both the large and the normal block to the space.
    pub fn destroy(&mut self) {
        self.return_block(true);
        self.return_block(false);
    }

    /// Fast-path allocation: bump `cursor` by `size` (aligned to `align`).
    /// Falls through to `alloc_slow()` when the request does not fit below
    /// `limit`. Returns the start address of the allocated region.
    #[inline(always)]
    pub fn alloc(&mut self, size: usize, align: usize) -> Address {
        // this part of code will slow down allocation
        let align = objectmodel::check_alignment(align);
        // end

        trace_if!(
            TRACE_ALLOC,
            "Mutator: fastpath alloc: size={}, align={}",
            size,
            align
        );

        let start = self.cursor.align_up(align);
        let end = start + size;

        trace_if!(
            TRACE_ALLOC,
            "Mutator: fastpath alloc: start=0x{:x}, end=0x{:x}",
            start,
            end
        );

        if end > self.limit {
            self.alloc_slow(size, align)
        } else {
            self.cursor = end;
            start
        }
    }

    /// Slow path: requests larger than a line use the overflow allocator;
    /// smaller ones retry from the local block's free lines.
    /// `#[inline(never)]` keeps this out of the inlined fast path.
    #[inline(never)]
    pub fn alloc_slow(&mut self, size: usize, align: usize) -> Address {
        if size > BYTES_IN_LINE {
            trace_if!(TRACE_ALLOC, "Mutator: overflow alloc()");
            self.overflow_alloc(size, align)
        } else {
            trace_if!(
                TRACE_ALLOC,
                "Mutator: fastpath alloc: try_alloc_from_local()"
            );
            self.try_alloc_from_local(size, align)
        }
    }

    /// Post-allocation bookkeeping: objects larger than a line straddle
    /// multiple lines, so mark the GC byte for `obj` with GC_STRADDLE_BIT.
    #[inline(always)]
    pub fn post_alloc(&mut self, obj: Address, size: usize) {
        if size > BYTES_IN_LINE {
            let index = self.space.get_word_index(obj);
            let slot = self.space.get_gc_byte_slot(index);
            unsafe { slot.store(slot.load::<u8>() | GC_STRADDLE_BIT) }
        }
    }

    /// Bump allocation from the dedicated overflow (large) block; fetches a
    /// fresh block from the space when the request does not fit.
    pub fn overflow_alloc(&mut self, size: usize, align: usize) -> Address {
        let start = self.large_cursor.align_up(align);
        let end = start + size;

        trace_if!(
            TRACE_ALLOC,
            "Mutator: overflow alloc: start={}, end={}",
            start,
            end
        );

        if end > self.large_limit {
            self.alloc_from_global(size, align, true)
        } else {
            self.large_cursor = end;
            start
        }
    }

    /// Writes the type encoding for the object at `addr` into the side
    /// metadata map (sidemap build).
    #[inline(always)]
    #[cfg(feature = "use-sidemap")]
    pub fn init_object<T>(&mut self, addr: Address, encode: T) {
        let map_slot = ImmixSpace::get_type_byte_slot_static(addr);
        unsafe {
            map_slot.store(encode);
        }
    }

    /// Writes the 64-bit header for the object at `addr` directly into the
    /// in-object header slot (non-sidemap build).
    #[inline(always)]
    #[cfg(not(feature = "use-sidemap"))]
    pub fn init_object(&mut self, addr: Address, encode: u64) {
        unsafe {
            (addr + objectmodel::OBJECT_HEADER_OFFSET).store(encode);
        }
    }

    /// Hybrid-object initialization is not implemented for the sidemap build.
    #[inline(always)]
    #[cfg(feature = "use-sidemap")]
    #[allow(unused_variables)]
    pub fn init_hybrid<T>(&mut self, addr: Address, encode: T, len: u64) {
        unimplemented!()
    }

    /// Initializes a hybrid object's header: merges `len` into the header
    /// encoding (shifted/masked per the object model) and stores it at the
    /// in-object header slot (non-sidemap build).
    #[inline(always)]
    #[cfg(not(feature = "use-sidemap"))]
    pub fn init_hybrid(&mut self, addr: Address, encode: u64, len: u64) {
        let encode =
            encode | ((len << objectmodel::SHR_HYBRID_LENGTH) & objectmodel::MASK_HYBRID_LENGTH);
        unsafe {
            (addr + objectmodel::OBJECT_HEADER_OFFSET).store(encode);
        }
    }

    /// Tries to satisfy a small allocation from free lines remaining in the
    /// local block: finds the next run of available lines, points
    /// cursor/limit at it, zeroes it, marks the lines FreshAlloc, then
    /// retries `alloc()`. Falls back to `alloc_from_global()` when the block
    /// is exhausted (or `line` already says there is nothing local).
    pub fn try_alloc_from_local(&mut self, size: usize, align: usize) -> Address {
        if self.line < LINES_IN_BLOCK {
            let opt_next_available_line = {
                let cur_line = self.line;
                self.block().get_next_available_line(cur_line)
            };
            trace_if!(
                TRACE_ALLOC,
                "Mutator: alloc from local, next available line: {:?}",
                opt_next_available_line
            );

            match opt_next_available_line {
                Some(next_available_line) => {
                    // we can alloc from local blocks
                    let end_line = self.block().get_next_unavailable_line(next_available_line);

                    self.cursor = self.block().mem_start() +
                        ((next_available_line as usize) << LOG_BYTES_IN_LINE);
                    self.limit =
                        self.block().mem_start() + ((end_line as usize) << LOG_BYTES_IN_LINE);
                    self.line = end_line;

                    // lines are zeroed lazily, only when handed out
                    unsafe {
                        self.cursor.memset(0, self.limit - self.cursor);
                    }

                    for line in next_available_line..end_line {
                        self.block().set_line_mark(line, LineMark::FreshAlloc);
                    }

                    self.alloc(size, align)
                }
                None => self.alloc_from_global(size, align, false)
            }
        } else {
            // we need to alloc from global space
            self.alloc_from_global(size, align, false)
        }
    }

    /// Slowest path: returns the current (large or normal) block, then loops
    /// requesting a usable block from the space. Each iteration hits the
    /// mutator's yieldpoint so a GC can be serviced while blocks are
    /// unavailable; on success the relevant cursor/limit/block are
    /// re-initialized and `alloc()` is retried.
    fn alloc_from_global(&mut self, size: usize, align: usize, request_large: bool) -> Address {
        trace!("Mutator: slowpath: alloc_from_global()");
        self.return_block(request_large);

        loop {
            // check if yield
            unsafe { &mut *self.mutator }.yieldpoint();

            let new_block: Option<Raw<ImmixBlock>> = self.space.get_next_usable_block();

            match new_block {
                Some(b) => {
                    // zero the block - do not need to zero the block here
                    // we zero lines that get used in try_alloc_from_local()
                    //                    b.lazy_zeroing();

                    if request_large {
                        self.large_cursor = b.mem_start();
                        self.large_limit = b.mem_start() + BYTES_IN_BLOCK;
                        self.large_block = Some(b);

                        trace!(
                            "Mutator: slowpath: new large_block starting from 0x{:x}",
                            self.large_cursor
                        );

                        return self.alloc(size, align);
                    } else {
                        // limit == cursor: the next alloc() takes the slow
                        // path and claims lines via try_alloc_from_local()
                        self.cursor = b.mem_start();
                        self.limit = b.mem_start();
                        self.line = 0;
                        self.block = Some(b);

                        trace!(
                            "Mutator: slowpath: new block starting from 0x{:x}",
                            self.cursor
                        );

                        return self.alloc(size, align);
                    }
                }
                None => {
                    continue;
                }
            }
        }
    }

    /// Returns both blocks to the space so the collector sees a consistent
    /// view before a GC.
    pub fn prepare_for_gc(&mut self) {
        self.return_block(true);
        self.return_block(false);
    }

    /// Hands the current block (large if `request_large`, else normal) back
    /// to the space as a used block; no-op if that block is None.
    fn return_block(&mut self, request_large: bool) {
        if request_large {
            if self.large_block.is_some() {
                trace!(
                    "finishing large block {}",
                    self.large_block.as_ref().unwrap().addr()
                );
                self.space
                    .return_used_block(self.large_block.take().unwrap());
            }
        } else {
            if self.block.is_some() {
                trace!("finishing block {}", self.block.as_ref().unwrap().addr());
                self.space.return_used_block(self.block.take().unwrap());
            }
        }
    }

    /// Convenience accessor for the current normal block.
    /// Panics if there is no current block.
    fn block(&mut self) -> &mut ImmixBlock {
        self.block.as_mut().unwrap()
    }

    /// Debug helper: dumps `length` bytes of the object at `obj`.
    pub fn print_object(&self, obj: Address, length: usize) {
        ImmixAllocator::print_object_static(obj, length);
    }

    /// Debug helper: logs the memory of `[obj, obj + length)` one 64-bit
    /// word at a time.
    pub fn print_object_static(obj: Address, length: usize) {
        debug!("===Object {:#X} size: {} bytes===", obj, length);
        let mut cur_addr = obj;
        while cur_addr < obj + length {
            debug!("Address: {:#X}   {:#X}", cur_addr, unsafe {
                cur_addr.load::<u64>()
            });
            cur_addr = cur_addr + 8 as ByteSize;
        }
        debug!("----");
        debug!("=========");
    }
}
impl fmt::Display for ImmixAllocator {
332
333
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        if self.cursor.is_zero() {
qinsoon's avatar
qinsoon committed
334
            write!(f, "Mutator (not initialized)").unwrap();
335
336
337
338
339
        } else {
            write!(f, "Mutator:\n").unwrap();
            write!(f, "cursor= {:#X}\n", self.cursor).unwrap();
            write!(f, "limit = {:#X}\n", self.limit).unwrap();
            write!(f, "line  = {}\n", self.line).unwrap();
qinsoon's avatar
qinsoon committed
340
341
            write!(f, "large cursor = {}\n", self.large_cursor).unwrap();
            write!(f, "large limit  = {}\n", self.large_limit).unwrap();
342
        }
qinsoon's avatar
qinsoon committed
343
        Ok(())
344
    }
345
}