To protect your data, the CISO has recommended that users enable GitLab two-factor authentication (2FA) as soon as possible.

immix_mutator.rs 10 KB
Newer Older
Isaac Oscar Gariano's avatar
Isaac Oscar Gariano committed
1
// Copyright 2017 The Australian National University
qinsoon's avatar
qinsoon committed
2
//
Isaac Oscar Gariano's avatar
Isaac Oscar Gariano committed
3
4
5
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
qinsoon's avatar
qinsoon committed
6
//
Isaac Oscar Gariano's avatar
Isaac Oscar Gariano committed
7
//     http://www.apache.org/licenses/LICENSE-2.0
qinsoon's avatar
qinsoon committed
8
//
Isaac Oscar Gariano's avatar
Isaac Oscar Gariano committed
9
10
11
12
13
14
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

qinsoon's avatar
qinsoon committed
15
use heap::*;
qinsoon's avatar
qinsoon committed
16
17
use heap::immix::ImmixSpace;
use heap::immix::immix_space::ImmixBlock;
18
use objectmodel;
qinsoon's avatar
qinsoon committed
19
use utils::Address;
20
use utils::ByteSize;
21
22
use std::*;

qinsoon's avatar
qinsoon committed
23
const TRACE_ALLOC: bool = true;
24

25
#[repr(C)]
qinsoon's avatar
qinsoon committed
26
pub struct ImmixAllocator {
27
28
    // cursor might be invalid, but Option<Address> is expensive here
    // after every GC, we set both cursor and limit
qinsoon's avatar
qinsoon committed
29
30
31
    // to Address::zero() so that alloc will branch to slow path
    cursor: Address,
    limit: Address,
qinsoon's avatar
qinsoon committed
32
    line: usize,
qinsoon's avatar
qinsoon committed
33
    block: Option<Raw<ImmixBlock>>,
qinsoon's avatar
qinsoon committed
34
35
36
37
38
39

    large_cursor: Address,
    large_limit: Address,
    large_block: Option<Raw<ImmixBlock>>,

    space: Raw<ImmixSpace>,
qinsoon's avatar
qinsoon committed
40
    mutator: *mut Mutator
41
42
}

43
lazy_static! {
qinsoon's avatar
qinsoon committed
44
45
    pub static ref CURSOR_OFFSET : ByteSize = offset_of!(ImmixAllocator=>cursor).get_byte_offset();
    pub static ref LIMIT_OFFSET  : ByteSize = offset_of!(ImmixAllocator=>limit).get_byte_offset();
46
47
}

qinsoon's avatar
qinsoon committed
48
49
impl Allocator for ImmixAllocator {
    fn reset_after_gc(&mut self) {
50
51
        self.reset();
    }
qinsoon's avatar
qinsoon committed
52

qinsoon's avatar
qinsoon committed
53
54
55
    fn prepare_for_gc(&mut self) {
        self.return_block(true);
        self.return_block(false);
56
    }
qinsoon's avatar
qinsoon committed
57

qinsoon's avatar
qinsoon committed
58
    fn set_mutator(&mut self, mutator: *mut Mutator) {
qinsoon's avatar
qinsoon committed
59
        self.mutator = mutator;
60
    }
qinsoon's avatar
qinsoon committed
61

qinsoon's avatar
qinsoon committed
62
    fn destroy(&mut self) {
qinsoon's avatar
qinsoon committed
63
64
        self.return_block(true);
        self.return_block(false);
65
    }
qinsoon's avatar
qinsoon committed
66

67
    #[inline(always)]
qinsoon's avatar
qinsoon committed
68
    fn alloc(&mut self, size: usize, align: usize) -> Address {
69
70
71
        // this part of code will slow down allocation
        let align = objectmodel::check_alignment(align);
        // end
qinsoon's avatar
qinsoon committed
72

qinsoon's avatar
qinsoon committed
73
74
75
76
77
78
        trace_if!(
            TRACE_ALLOC,
            "Mutator: fastpath alloc: size={}, align={}",
            size,
            align
        );
79

80
        let start = self.cursor.align_up(align);
81
        let end = start + size;
82

qinsoon's avatar
qinsoon committed
83
84
85
86
87
88
        trace_if!(
            TRACE_ALLOC,
            "Mutator: fastpath alloc: start=0x{:x}, end=0x{:x}",
            start,
            end
        );
89

90
        if end > self.limit {
qinsoon's avatar
qinsoon committed
91
            self.alloc_slow(size, align)
92
93
        } else {
            self.cursor = end;
qinsoon's avatar
qinsoon committed
94
95
96
            start
        }
    }
qinsoon's avatar
qinsoon committed
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
}

impl ImmixAllocator {
    fn reset(&mut self) -> () {
        unsafe {
            // should not use Address::zero() other than initialization
            self.cursor = Address::zero();
            self.limit = Address::zero();
            self.large_cursor = Address::zero();
            self.large_limit = Address::zero();
        }
        self.line = LINES_IN_BLOCK;
        self.block = None;
        self.large_block = None;
    }

    pub fn new(space: Raw<ImmixSpace>) -> ImmixAllocator {
        ImmixAllocator {
            cursor: unsafe { Address::zero() },
            limit: unsafe { Address::zero() },
            line: LINES_IN_BLOCK,
            block: None,
            large_cursor: unsafe { Address::zero() },
            large_limit: unsafe { Address::zero() },
            large_block: None,
            space,
            mutator: ptr::null_mut()
        }
    }
qinsoon's avatar
qinsoon committed
126
127

    #[inline(never)]
qinsoon's avatar
qinsoon committed
128
129
130
131
132
133
134
135
136
137
138
139
140
141
    pub fn alloc_slow(&mut self, size: usize, align: usize) -> Address {
        if size > BYTES_IN_LINE {
            trace_if!(TRACE_ALLOC, "Mutator: overflow alloc()");
            self.overflow_alloc(size, align)
        } else {
            trace_if!(
                TRACE_ALLOC,
                "Mutator: fastpath alloc: try_alloc_from_local()"
            );
            self.try_alloc_from_local(size, align)
        }
    }

    #[inline(always)]
qinsoon's avatar
qinsoon committed
142
    pub fn post_alloc(&mut self, obj: Address, size: usize) {
qinsoon's avatar
qinsoon committed
143
144
145
146
147
148
149
        if size > BYTES_IN_LINE {
            let index = self.space.get_word_index(obj);
            let slot = self.space.get_gc_byte_slot(index);
            unsafe { slot.store(slot.load::<u8>() | GC_STRADDLE_BIT) }
        }
    }

qinsoon's avatar
qinsoon committed
150
151
152
    pub fn overflow_alloc(&mut self, size: usize, align: usize) -> Address {
        let start = self.large_cursor.align_up(align);
        let end = start + size;
qinsoon's avatar
qinsoon committed
153

qinsoon's avatar
qinsoon committed
154
155
156
157
158
159
160
161
162
163
164
165
        trace_if!(
            TRACE_ALLOC,
            "Mutator: overflow alloc: start={}, end={}",
            start,
            end
        );

        if end > self.large_limit {
            self.alloc_from_global(size, align, true)
        } else {
            self.large_cursor = end;
            start
qinsoon's avatar
qinsoon committed
166
        }
167
    }
qinsoon's avatar
qinsoon committed
168

169
    #[inline(always)]
qinsoon's avatar
qinsoon committed
170
    pub fn init_object<T>(&mut self, addr: Address, encode: T) {
qinsoon's avatar
qinsoon committed
171
        let map_slot = ImmixSpace::get_type_byte_slot_static(addr);
qinsoon's avatar
qinsoon committed
172
173
174
        unsafe {
            map_slot.store(encode);
        }
175
    }
qinsoon's avatar
qinsoon committed
176

qinsoon's avatar
qinsoon committed
177
    pub fn try_alloc_from_local(&mut self, size: usize, align: usize) -> Address {
qinsoon's avatar
qinsoon committed
178
        if self.line < LINES_IN_BLOCK {
179
180
181
182
            let opt_next_available_line = {
                let cur_line = self.line;
                self.block().get_next_available_line(cur_line)
            };
qinsoon's avatar
qinsoon committed
183
184
185
186
187
            trace_if!(
                TRACE_ALLOC,
                "Mutator: alloc from local, next available line: {:?}",
                opt_next_available_line
            );
qinsoon's avatar
qinsoon committed
188

189
190
191
192
            match opt_next_available_line {
                Some(next_available_line) => {
                    // we can alloc from local blocks
                    let end_line = self.block().get_next_unavailable_line(next_available_line);
qinsoon's avatar
qinsoon committed
193

qinsoon's avatar
qinsoon committed
194
195
196
197
                    self.cursor = self.block().mem_start() +
                        ((next_available_line as usize) << LOG_BYTES_IN_LINE);
                    self.limit =
                        self.block().mem_start() + ((end_line as usize) << LOG_BYTES_IN_LINE);
qinsoon's avatar
qinsoon committed
198
199
200
201
202
203
                    self.line = end_line;

                    unsafe {
                        self.cursor.memset(0, self.limit - self.cursor);
                    }

204
                    for line in next_available_line..end_line {
qinsoon's avatar
qinsoon committed
205
                        self.block().set_line_mark(line, LineMark::FreshAlloc);
206
                    }
207

qinsoon's avatar
qinsoon committed
208
                    self.alloc(size, align)
209
                }
qinsoon's avatar
qinsoon committed
210
                None => self.alloc_from_global(size, align, false)
211
212
213
            }
        } else {
            // we need to alloc from global space
qinsoon's avatar
qinsoon committed
214
            self.alloc_from_global(size, align, false)
215
216
        }
    }
qinsoon's avatar
qinsoon committed
217

qinsoon's avatar
qinsoon committed
218
219
220
    fn alloc_from_global(&mut self, size: usize, align: usize, request_large: bool) -> Address {
        trace!("Mutator: slowpath: alloc_from_global()");
        self.return_block(request_large);
221
222
223

        loop {
            // check if yield
qinsoon's avatar
qinsoon committed
224
            unsafe { &mut *self.mutator }.yieldpoint();
qinsoon's avatar
qinsoon committed
225

qinsoon's avatar
qinsoon committed
226
            let new_block: Option<Raw<ImmixBlock>> = self.space.get_next_usable_block();
qinsoon's avatar
qinsoon committed
227

228
            match new_block {
qinsoon's avatar
qinsoon committed
229
                Some(b) => {
qinsoon's avatar
qinsoon committed
230
231
                    // zero the block - do not need to zero the block here
                    // we zero lines that get used in try_alloc_from_local()
qinsoon's avatar
qinsoon committed
232
                    //                    b.lazy_zeroing();
233

qinsoon's avatar
qinsoon committed
234
235
                    if request_large {
                        self.large_cursor = b.mem_start();
qinsoon's avatar
qinsoon committed
236
                        self.large_limit = b.mem_start() + BYTES_IN_BLOCK;
qinsoon's avatar
qinsoon committed
237
238
                        self.large_block = Some(b);

qinsoon's avatar
qinsoon committed
239
240
241
242
243
                        trace!(
                            "Mutator: slowpath: new large_block starting from 0x{:x}",
                            self.large_cursor
                        );

qinsoon's avatar
qinsoon committed
244
245
246
247
248
249
                        return self.alloc(size, align);
                    } else {
                        self.cursor = b.mem_start();
                        self.limit = b.mem_start();
                        self.line = 0;
                        self.block = Some(b);
250

qinsoon's avatar
qinsoon committed
251
252
253
254
                        trace!(
                            "Mutator: slowpath: new block starting from 0x{:x}",
                            self.cursor
                        );
255

qinsoon's avatar
qinsoon committed
256
                        return self.alloc(size, align);
qinsoon's avatar
qinsoon committed
257
                    }
qinsoon's avatar
qinsoon committed
258
259
260
261
                }
                None => {
                    continue;
                }
262
263
264
            }
        }
    }
qinsoon's avatar
qinsoon committed
265

qinsoon's avatar
qinsoon committed
266

qinsoon's avatar
qinsoon committed
267

qinsoon's avatar
qinsoon committed
268
269
270
271
272
273
274
275
276
277
278
279
280
281
    fn return_block(&mut self, request_large: bool) {
        if request_large {
            if self.large_block.is_some() {
                trace!(
                    "finishing large block {}",
                    self.large_block.as_ref().unwrap().addr()
                );
                self.space
                    .return_used_block(self.large_block.take().unwrap());
            }
        } else {
            if self.block.is_some() {
                trace!("finishing block {}", self.block.as_ref().unwrap().addr());
                self.space.return_used_block(self.block.take().unwrap());
qinsoon's avatar
qinsoon committed
282
            }
qinsoon's avatar
qinsoon committed
283
        }
284
    }
qinsoon's avatar
qinsoon committed
285

286
287
288
    fn block(&mut self) -> &mut ImmixBlock {
        self.block.as_mut().unwrap()
    }
qinsoon's avatar
qinsoon committed
289

290
    pub fn print_object(&self, obj: Address, length: usize) {
qinsoon's avatar
qinsoon committed
291
        ImmixAllocator::print_object_static(obj, length);
292
    }
qinsoon's avatar
qinsoon committed
293

294
    pub fn print_object_static(obj: Address, length: usize) {
qinsoon's avatar
qinsoon committed
295
        debug!("===Object {:#X} size: {} bytes===", obj, length);
296
        let mut cur_addr = obj;
297
        while cur_addr < obj + length {
qinsoon's avatar
qinsoon committed
298
299
300
            debug!("Address: {:#X}   {:#X}", cur_addr, unsafe {
                cur_addr.load::<u64>()
            });
301
            cur_addr = cur_addr + 8 as ByteSize;
302
        }
qinsoon's avatar
qinsoon committed
303
        debug!("----");
qinsoon's avatar
qinsoon committed
304
        debug!("=========");
305
306
307
    }
}

qinsoon's avatar
qinsoon committed
308
impl fmt::Display for ImmixAllocator {
309
310
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        if self.cursor.is_zero() {
qinsoon's avatar
qinsoon committed
311
            write!(f, "Mutator (not initialized)").unwrap();
312
313
314
315
316
        } else {
            write!(f, "Mutator:\n").unwrap();
            write!(f, "cursor= {:#X}\n", self.cursor).unwrap();
            write!(f, "limit = {:#X}\n", self.limit).unwrap();
            write!(f, "line  = {}\n", self.line).unwrap();
qinsoon's avatar
qinsoon committed
317
318
            write!(f, "large cursor = {}\n", self.large_cursor).unwrap();
            write!(f, "large limit  = {}\n", self.large_limit).unwrap();
319
        }
qinsoon's avatar
qinsoon committed
320
        Ok(())
321
    }
322
}