// Copyright 2017 The Australian National University
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

use heap::immix::immix_space::ImmixBlock;
use heap::immix::ImmixSpace;
use heap::*;
use objectmodel;
use std::*;
use utils::Address;
use utils::ByteSize;

const TRACE_ALLOC: bool = true;

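// ImmixAllocator is the per-mutator bump allocator for an Immix space:
// alloc() bumps `cursor` towards `limit` within the current block, while
// objects larger than a line are bump-allocated separately through
// `large_cursor`/`large_limit` (the overflow path below).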
#[repr(C)]
pub struct ImmixAllocator {
    // cursor might be invalid, but Option<Address> is expensive here
    // after every GC, we set both cursor and limit
    // to Address::zero() so that alloc will branch to slow path
    cursor: Address,
    limit: Address,
    line: usize,
    block: Option<Raw<ImmixBlock>>,

    large_cursor: Address,
    large_limit: Address,
    large_block: Option<Raw<ImmixBlock>>,

    space: Raw<ImmixSpace>,
    mutator: *mut Mutator
}

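// Byte offsets of the `cursor` and `limit` fields, presumably consumed by
// the compiler when emitting the inlined allocation fastpath (hence the
// #[repr(C)] layout above).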
lazy_static! {
    pub static ref CURSOR_OFFSET: ByteSize =
        offset_of!(ImmixAllocator=>cursor).get_byte_offset();
    pub static ref LIMIT_OFFSET: ByteSize =
        offset_of!(ImmixAllocator=>limit).get_byte_offset();
}

impl Allocator for ImmixAllocator {
    fn reset_after_gc(&mut self) {
        self.reset();
    }

    fn prepare_for_gc(&mut self) {
        self.return_block(true);
        self.return_block(false);
    }

    fn set_mutator(&mut self, mutator: *mut Mutator) {
        self.mutator = mutator;
    }

    fn destroy(&mut self) {
        self.return_block(true);
        self.return_block(false);
    }

    #[inline(always)]
    fn alloc(&mut self, size: usize, align: usize) -> Address {
        trace!("immix_mutator::alloc({}, {}, {});", &self, size, align);
        // this part of the code slows down allocation
        let align = objectmodel::check_alignment(align);
        // end

        trace_if!(
            TRACE_ALLOC,
            "Mutator: fastpath alloc: size={}, align={}",
            size,
            align
        );

        let start = self.cursor.align_up(align);
        let end = start + size;

        trace_if!(
            TRACE_ALLOC,
            "Mutator: fastpath alloc: start=0x{:x}, end=0x{:x}",
            start,
            end
        );

        if end > self.limit {
            self.alloc_slow(size, align)
        } else {
            self.cursor = end;
            start
        }
    }
}

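// Typical usage, as a sketch (assumes the caller owns a live
// `Raw<ImmixSpace>` and a valid `*mut Mutator`; `size`, `align` and
// `encode` are whatever the object model dictates):
//
//     let mut allocator = ImmixAllocator::new(space);
//     allocator.set_mutator(mutator);
//     let obj = allocator.alloc(size, align);
//     allocator.post_alloc(obj, size);
//     allocator.init_object(obj, encode);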
impl ImmixAllocator {
    fn reset(&mut self) {
        unsafe {
            // Address::zero() should only ever be used for (re)initialization like this
            self.cursor = Address::zero();
            self.limit = Address::zero();
            self.large_cursor = Address::zero();
            self.large_limit = Address::zero();
        }
        self.line = LINES_IN_BLOCK;
        self.block = None;
        self.large_block = None;
    }

    pub fn new(space: Raw<ImmixSpace>) -> ImmixAllocator {
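        // cursor and limit start at zero so that the first alloc() takes
        // the slowpath and grabs a block from the space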
        ImmixAllocator {
            cursor: unsafe { Address::zero() },
            limit: unsafe { Address::zero() },
            line: LINES_IN_BLOCK,
            block: None,
            large_cursor: unsafe { Address::zero() },
            large_limit: unsafe { Address::zero() },
            large_block: None,
            space,
            mutator: ptr::null_mut()
        }
    }

    #[inline(never)]
    pub fn alloc_slow(&mut self, size: usize, align: usize) -> Address {
        trace!("immix_mutator::alloc_slow({}, {}, {});", &self, size, align);
        if size > BYTES_IN_LINE {
            trace_if!(TRACE_ALLOC, "Mutator: overflow alloc()");
            self.overflow_alloc(size, align)
        } else {
            trace_if!(
                TRACE_ALLOC,
                "Mutator: fastpath alloc: try_alloc_from_local()"
            );
            self.try_alloc_from_local(size, align)
        }
    }

    #[inline(always)]
    pub fn post_alloc(&mut self, obj: Address, size: usize) {
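        // an object larger than a line straddles multiple lines; record
        // that in its GC byte so line marking can account for every line
        // the object covers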
        if size > BYTES_IN_LINE {
            let index = self.space.get_word_index(obj);
            let slot = self.space.get_gc_byte_slot(index);
            unsafe { slot.store(slot.load::<u8>() | GC_STRADDLE_BIT) }
        }
    }

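    // Overflow allocation: serve objects larger than a line from a separate
    // bump region so they do not interfere with line recycling.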
    pub fn overflow_alloc(&mut self, size: usize, align: usize) -> Address {
        trace!("immix_mutator::overflow_alloc(self, {}, {});", size, align);
        let start = self.large_cursor.align_up(align);
        let end = start + size;

        trace_if!(
            TRACE_ALLOC,
            "Mutator: overflow alloc: start={}, end={}",
            start,
            end
        );

        if end > self.large_limit {
            self.alloc_from_global(size, align, true)
        } else {
            self.large_cursor = end;
            start
        }
    }

    #[inline(always)]
    pub fn init_object<T>(&mut self, addr: Address, encode: T) {
        trace!("init_object({}, _)", addr);
        let map_slot = ImmixSpace::get_type_byte_slot_static(addr);
        unsafe {
            map_slot.store(encode);
        }
    }

    pub fn try_alloc_from_local(
        &mut self,
        size: usize,
        align: usize
    ) -> Address {
        trace!(
            "immix_mutator::try_alloc_from_local({}, {}, {});",
            &self,
            size,
            align
        );
        if self.line < LINES_IN_BLOCK {
            let opt_next_available_line = {
                let cur_line = self.line;
                self.block().get_next_available_line(cur_line)
            };
            trace_if!(
                TRACE_ALLOC,
                "Mutator: alloc from local, next available line: {:?}",
                opt_next_available_line
            );

            match opt_next_available_line {
                Some(next_available_line) => {
                    // we can alloc from local blocks
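                    // set the bump region to the contiguous run of free
                    // lines [next_available_line, end_line), zero it, mark
                    // the lines as freshly allocated, then retry alloc()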
                    let end_line = self
                        .block()
                        .get_next_unavailable_line(next_available_line);

                    self.cursor = self.block().mem_start()
                        + ((next_available_line as usize) << LOG_BYTES_IN_LINE);
                    self.limit = self.block().mem_start()
                        + ((end_line as usize) << LOG_BYTES_IN_LINE);
                    self.line = end_line;

                    unsafe {
                        self.cursor.memset(0, self.limit - self.cursor);
                    }

                    for line in next_available_line..end_line {
                        self.block().set_line_mark(line, LineMark::FreshAlloc);
                    }

                    self.alloc(size, align)
                }
                None => self.alloc_from_global(size, align, false)
            }
        } else {
            // we need to alloc from global space
            self.alloc_from_global(size, align, false)
        }
    }

    fn alloc_from_global(
        &mut self,
        size: usize,
        align: usize,
        request_large: bool
    ) -> Address {
        trace!(
            "immix_mutator::alloc_from_global({}, {}, {}, {});",
            &self,
            size,
            align,
            request_large
        );
        trace!("Mutator: slowpath: alloc_from_global()");
        self.return_block(request_large);

        loop {
            // check whether this thread should yield to the GC (safepoint)
            unsafe { &mut *self.mutator }.yieldpoint();

            let new_block: Option<Raw<ImmixBlock>> =
                self.space.get_next_usable_block();

            match new_block {
                Some(b) => {
                    // no need to zero the whole block here: we only zero the
                    // lines that actually get used, in try_alloc_from_local()
                    // b.lazy_zeroing();

                    if request_large {
                        self.large_cursor = b.mem_start();
                        self.large_limit = b.mem_start() + BYTES_IN_BLOCK;
                        self.large_block = Some(b);

                        trace!(
                            "Mutator: slowpath: new large_block starting from 0x{:x}",
                            self.large_cursor
                        );

                        return self.alloc(size, align);
                    } else {
                        self.cursor = b.mem_start();
                        self.limit = b.mem_start();
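                        // note: limit == cursor, so the next alloc() falls
                        // into the slowpath, which claims and zeroes usable
                        // lines via try_alloc_from_local()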
                        self.line = 0;
                        self.block = Some(b);

                        trace!(
                            "Mutator: slowpath: new block starting from 0x{:x}",
                            self.cursor
                        );

                        return self.alloc(size, align);
                    }
                }
                None => {
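                    // no usable block at the moment: loop back to the
                    // yieldpoint above so a collection can reclaim blocks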
                    continue;
                }
            }
        }
    }

    fn return_block(&mut self, request_large: bool) {
        trace!("immix_mutator::return_block(self, {});", request_large);
        if request_large {
            if self.large_block.is_some() {
                trace!(
                    "finishing large block {}",
                    self.large_block.as_ref().unwrap().addr()
                );
                self.space
                    .return_used_block(self.large_block.take().unwrap());
            }
        } else {
            if self.block.is_some() {
                trace!(
                    "finishing block {}",
                    self.block.as_ref().unwrap().addr()
                );
                self.space.return_used_block(self.block.take().unwrap());
            }
        }
    }

    fn block(&mut self) -> &mut ImmixBlock {
        self.block.as_mut().unwrap()
    }

    pub fn print_object(&self, obj: Address, length: usize) {
        ImmixAllocator::print_object_static(obj, length);
    }

    pub fn print_object_static(obj: Address, length: usize) {
        debug!("===Object {:#X} size: {} bytes===", obj, length);
        let mut cur_addr = obj;
        while cur_addr < obj + length {
            debug!("Address: {:#X}   {:#X}", cur_addr, unsafe {
                cur_addr.load::<u64>()
            });
            cur_addr = cur_addr + 8 as ByteSize;
        }
        debug!("----");
        debug!("=========");
    }
}

impl fmt::Display for ImmixAllocator {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        if self.cursor.is_zero() {
            write!(f, "Mutator (not initialized)").unwrap();
        } else {
            write!(f, "Mutator:\n").unwrap();
            write!(f, "cursor= {:#X}\n", self.cursor).unwrap();
            write!(f, "limit = {:#X}\n", self.limit).unwrap();
            write!(f, "line  = {}\n", self.line).unwrap();
            write!(f, "large cursor = {}\n", self.large_cursor).unwrap();
            write!(f, "large limit  = {}\n", self.large_limit).unwrap();
        }
        Ok(())
    }
}