treadmill.rs 14.6 KB
Newer Older
Isaac Oscar Gariano's avatar
Isaac Oscar Gariano committed
1
// Copyright 2017 The Australian National University
qinsoon's avatar
qinsoon committed
2
//
Isaac Oscar Gariano's avatar
Isaac Oscar Gariano committed
3 4 5
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
qinsoon's avatar
qinsoon committed
6
//
Isaac Oscar Gariano's avatar
Isaac Oscar Gariano committed
7
//     http://www.apache.org/licenses/LICENSE-2.0
qinsoon's avatar
qinsoon committed
8
//
Isaac Oscar Gariano's avatar
Isaac Oscar Gariano committed
9 10 11 12 13 14
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

qinsoon's avatar
qinsoon committed
15 16 17 18
#![allow(dead_code)]

use utils::Address;
use utils::mem::memmap;
19 20
use common::AddressMap;

21 22
use objectmodel;

qinsoon's avatar
qinsoon committed
23
use std::sync::Arc;
24 25
use std::fmt;
use std::sync::Mutex;
qinsoon's avatar
qinsoon committed
26

qinsoon's avatar
qinsoon committed
27 28
// Alignment required for the start of the space (512 KB); the anonymous
// mapping is over-allocated and its start aligned up to this.
const SPACE_ALIGN: usize = 1 << 19;
// Allocation granularity: every treadmill node covers one block of this size.
const BLOCK_SIZE: usize = 1 << 12; // 4kb

// When true, emit verbose trace! logging before/after allocation and during sweep.
const TRACE_TREADMILL: bool = false;
31

qinsoon's avatar
qinsoon committed
32 33
/// A free-list (treadmill) space for large objects, backed by an anonymous
/// mmap. `start`/`end` delimit the SPACE_ALIGN-aligned usable range inside
/// the mapping; all treadmill bookkeeping lives behind a Mutex.
#[repr(C)]
pub struct FreeListSpace {
    // aligned start of the usable address range
    start: Address,
    // exclusive end of the usable range (start + space_size)
    end: Address,

    // per-address side maps, wrapped in Arc so they can be shared
    pub alloc_map: Arc<AddressMap<u8>>,
    pub trace_map: Arc<AddressMap<u8>>,

    // held only to keep the anonymous mapping alive for the space's lifetime
    #[allow(dead_code)]
    mmap: memmap::MmapMut,

    // treadmill node lists; the Mutex serializes alloc() and sweep()
    treadmill: Mutex<Treadmill>
}

impl FreeListSpace {
    pub fn new(space_size: usize) -> FreeListSpace {
48 49 50 51 52 53
        let mut anon_mmap: memmap::MmapMut =
            match memmap::MmapMut::map_anon(space_size + SPACE_ALIGN) {
                Ok(m) => m,
                Err(_) => panic!("failed to call mmap")
            };
        let start: Address = Address::from_ptr::<u8>(anon_mmap.as_mut_ptr()).align_up(SPACE_ALIGN);
qinsoon's avatar
qinsoon committed
54
        let end: Address = start + space_size;
qinsoon's avatar
qinsoon committed
55

56 57 58 59 60 61 62 63 64 65 66 67 68 69 70
        let trace_map = AddressMap::new(start, end);
        let alloc_map = AddressMap::new(start, end);
        if cfg!(debug_assertions) {
            trace_map.init_all(0);
            alloc_map.init_all(0);
        }

        let treadmill = Treadmill::new(start, end);

        FreeListSpace {
            start: start,
            end: end,
            alloc_map: Arc::new(alloc_map),
            trace_map: Arc::new(trace_map),
            mmap: anon_mmap,
71
            treadmill: Mutex::new(treadmill)
72 73 74 75 76 77 78
        }
    }

    pub fn alloc(&self, size: usize, align: usize) -> Address {
        // every block is 'BLOCK_SIZE' aligned, usually we do not need to align
        assert!(BLOCK_SIZE % align == 0);

79 80
        let size = size + objectmodel::OBJECT_HEADER_SIZE;

81 82 83 84 85 86
        let blocks_needed = if size % BLOCK_SIZE == 0 {
            size / BLOCK_SIZE
        } else {
            size / BLOCK_SIZE + 1
        };

87
        if TRACE_TREADMILL {
88 89
            trace!("---before allocation---");
            trace!("{}", self);
90
        }
91

92
        trace!("requiring {} bytes ({} blocks)", size, blocks_needed);
93 94 95 96 97
        let res = {
            let mut treadmill = self.treadmill.lock().unwrap();
            treadmill.alloc_blocks(blocks_needed)
        };

98
        if TRACE_TREADMILL {
99 100
            trace!("---after allocation---");
            trace!("{}", self);
101
        }
102

103 104 105
        if res.is_zero() {
            res
        } else {
106
            res + (-objectmodel::OBJECT_HEADER_OFFSET)
107
        }
108 109
    }

110 111 112
    #[inline(always)]
    #[cfg(feature = "use-sidemap")]
    fn is_traced(&self, addr: Address, mark_state: u8) -> bool {
qinsoon's avatar
qinsoon committed
113 114 115 116
        objectmodel::is_traced(
            self.trace_map(),
            self.start,
            unsafe { addr.to_object_reference() },
117
            mark_state
qinsoon's avatar
qinsoon committed
118
        )
119 120 121 122 123
    }

    #[inline(always)]
    #[cfg(not(feature = "use-sidemap"))]
    fn is_traced(&self, addr: Address, mark_state: u8) -> bool {
qinsoon's avatar
qinsoon committed
124
        objectmodel::is_traced(unsafe { addr.to_object_reference() }, mark_state)
125 126
    }

127
    pub fn sweep(&self) {
128
        trace!("going to sweep treadmill space");
129 130 131 132 133 134 135
        if TRACE_TREADMILL {
            trace!("{}", self);
        }

        let mut nodes_scanned = 0;
        let mut free_nodes_scanned = 0;
        let mut alive_nodes_scanned = 0;
136

137
        let mut treadmill = self.treadmill.lock().unwrap();
138

139 140
        {
            let mark_state = objectmodel::load_mark_state();
141

142
            let from = treadmill.from;
qinsoon's avatar
qinsoon committed
143
            let to = treadmill.to;
144

145 146 147 148 149
            let total_nodes = treadmill.spaces[from].len();
            let mut i = 0;
            while nodes_scanned < total_nodes {
                trace!("scanning {}", treadmill.spaces[from][i]);
                let addr = treadmill.spaces[from][i].payload;
150

151
                nodes_scanned += 1;
152

153 154 155
                let traced = self.is_traced(addr, mark_state);

                if traced {
156 157
                    // this object is alive
                    alive_nodes_scanned += 1;
158

159 160
                    // move to tospace
                    let node = treadmill.spaces[from].remove(i);
161
                    treadmill.spaces[to].push(node);
162

163
                    trace!("is alive");
164

qinsoon's avatar
qinsoon committed
165
                // do not increment i
166
                } else {
167 168
                    free_nodes_scanned += 1;
                    i += 1;
169 170 171
                }
            }

172 173
            // check if we have any free nodes
            if free_nodes_scanned == 0 && treadmill.spaces[treadmill.to].len() == 0 {
174 175 176 177
                println!("didnt free up any memory in treadmill space");
                panic!("we ran out of memory in large object space")
            }
        }
178

179 180 181 182 183 184
        // next allocation in to_space will starts from alive_nodes_scanned
        treadmill.from_space_next = alive_nodes_scanned;

        // flip
        if treadmill.from == 0 {
            treadmill.from = 1;
qinsoon's avatar
qinsoon committed
185
            treadmill.to = 0;
186 187
        } else {
            treadmill.from = 0;
qinsoon's avatar
qinsoon committed
188
            treadmill.to = 1;
189 190
        }

qinsoon's avatar
qinsoon committed
191 192
        // sort from_space from from_space_next so contiguous blocks are together
        // (easier to allocate)
193 194 195 196 197 198 199 200 201 202 203 204 205 206 207 208 209 210 211 212 213
        let from = treadmill.from;
        let ref mut from_space = treadmill.spaces[from];

        // we do not care about alive nodes in from space
        for start in alive_nodes_scanned..from_space.len() {
            let first = {
                let mut ret = start;
                for i in start..from_space.len() {
                    if from_space[i].payload < from_space[ret].payload {
                        ret = i;
                    }
                }
                ret
            };

            if first != start {
                let block = from_space.remove(first);
                from_space.insert(start, block);
            }
        }

214 215 216 217 218 219
        if cfg!(debug_assertions) {
            debug!("---tread mill space---");
            debug!("total nodes scanned: {}", nodes_scanned);
            debug!("alive nodes scanned: {}", alive_nodes_scanned);
            debug!("free  nodes scanned: {}", free_nodes_scanned);
        }
qinsoon's avatar
qinsoon committed
220 221 222
    }
}

223 224 225 226 227 228 229 230 231 232 233 234 235 236 237 238 239 240 241 242 243 244 245 246 247 248 249 250 251 252 253 254
use heap::Space;

impl Space for FreeListSpace {
    // inclusive lower bound of the managed address range
    #[inline(always)]
    fn start(&self) -> Address {
        self.start
    }

    // exclusive upper bound of the managed address range
    #[inline(always)]
    fn end(&self) -> Address {
        self.end
    }

    // raw pointer into the allocation side map
    #[inline(always)]
    fn alloc_map(&self) -> *mut u8 {
        self.alloc_map.ptr
    }

    // raw pointer into the trace (mark) side map
    #[inline(always)]
    fn trace_map(&self) -> *mut u8 {
        self.trace_map.ptr
    }
}

unsafe impl Sync for FreeListSpace {}
unsafe impl Send for FreeListSpace {}

impl fmt::Display for FreeListSpace {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "FreeListSpace\n").unwrap();
        write!(f, "range={:#X} ~ {:#X}\n", self.start, self.end).unwrap();

qinsoon's avatar
qinsoon committed
255
        let treadmill: &Treadmill = &self.treadmill.lock().unwrap();
256 257
        write!(f, "treadmill: {}", treadmill)
    }
qinsoon's avatar
qinsoon committed
258 259
}

qinsoon's avatar
qinsoon committed
260 261
/// Treadmill bookkeeping: two vectors of block nodes. Allocation happens in
/// `spaces[from]`; during sweep, live nodes are moved into `spaces[to]` and
/// the two indices are flipped.
struct Treadmill {
    from_space_next: usize, // next available node in from_space
    // index (0 or 1) of the current from-space in `spaces`
    from: usize,
    // index (0 or 1) of the current to-space in `spaces`
    to: usize,
    spaces: [Vec<TreadmillNode>; 2]
}

impl Treadmill {
    fn new(start: Address, end: Address) -> Treadmill {
269
        let half_space = start + ((end - start) / 2);
qinsoon's avatar
qinsoon committed
270

271
        let mut from_space = vec![];
qinsoon's avatar
qinsoon committed
272
        let mut to_space = vec![];
273

274 275 276
        let mut addr = start;

        while addr < half_space {
277
            from_space.push(TreadmillNode::new(addr));
278
            addr = addr + BLOCK_SIZE;
279
        }
qinsoon's avatar
qinsoon committed
280 281

        while addr < end {
282
            to_space.push(TreadmillNode::new(addr));
283
            addr = addr + BLOCK_SIZE;
qinsoon's avatar
qinsoon committed
284 285
        }

286
        Treadmill {
287 288
            from_space_next: 0,
            from: 0,
qinsoon's avatar
qinsoon committed
289
            to: 1,
290
            spaces: [from_space, to_space]
291 292 293 294
        }
    }

    fn alloc_blocks(&mut self, n_blocks: usize) -> Address {
295 296 297
        match self.find_contiguous_blocks(n_blocks) {
            Some(start) => {
                if TRACE_TREADMILL {
qinsoon's avatar
qinsoon committed
298 299 300 301 302
                    trace!(
                        "found contiguous {} blocks, starting from {}",
                        n_blocks,
                        start
                    );
303 304 305 306 307 308 309 310 311 312 313 314 315 316 317 318 319 320 321 322 323 324 325 326 327 328 329 330 331 332 333 334 335 336 337 338 339 340 341 342 343 344 345 346 347 348 349 350 351 352 353 354 355 356 357 358
                }

                let ref mut from_space = self.spaces[self.from];

                // zero blocks
                let return_address = from_space[start].payload;
                Treadmill::zeroing_blocks(return_address, n_blocks);

                if start != self.from_space_next {
                    // contiguous blocks are not next few ones
                    // we need to move the blocks

                    // take the allocated blocks out
                    let new_allocated = {
                        let mut ret = vec![];

                        for _ in 0..n_blocks {
                            let block = from_space.remove(start);

                            if TRACE_TREADMILL {
                                trace!("remove allocated block from from_space: {}", block);
                                trace!("from space: ");
                                for i in 0..from_space.len() {
                                    trace!("{}", from_space[i]);
                                }
                            }

                            ret.push(block);
                        }

                        ret
                    };

                    // insert back and mov cursor
                    let mut cursor = self.from_space_next;
                    for block in new_allocated {
                        if TRACE_TREADMILL {
                            trace!("insert block {} to from_space at {}", block, cursor);
                        }

                        from_space.insert(cursor, block);

                        if TRACE_TREADMILL {
                            trace!("from space: ");
                            for i in 0..from_space.len() {
                                trace!("{}", from_space[i]);
                            }
                        }

                        cursor += 1;
                    }
                    self.from_space_next = cursor;
                } else {
                    // just move cursor
                    self.from_space_next += n_blocks;
                }
359

360
                return_address
361
            }
362 363 364 365
            None => {
                if TRACE_TREADMILL {
                    trace!("cannot find {} contiguous blocks", n_blocks);
                }
qinsoon's avatar
qinsoon committed
366
                unsafe { Address::zero() }
367 368 369
            }
        }
    }
370

371 372
    fn find_contiguous_blocks(&mut self, n_blocks: usize) -> Option<usize> {
        // e.g. we have 10 blocks, and we require 3 blocks,
qinsoon's avatar
qinsoon committed
373 374
        // we wont be able to find contiguous blocks if starting block is more than 7
        // (only 8, 9 might be available)
375 376 377 378 379 380 381 382 383 384
        // 7 = 10 (total) - 3 (required)
        // Rust range is exclusive of the end, so we use (total - required + 1)

        // we can always assume contiguous blocks are arrange next to each other
        // since we will do a sort after GC

        // we do not have enough blocks (no need to check if they are contiguous
        if self.from_space_next + n_blocks > self.spaces[self.from].len() {
            return None;
        }
385

386 387 388 389 390 391 392 393 394 395 396 397 398 399 400
        for i in self.from_space_next..(self.spaces[self.from].len() - n_blocks + 1) {
            if self.has_contiguous_blocks_starting_at(n_blocks, i) {
                return Some(i);
            }
        }

        None
    }

    fn has_contiguous_blocks_starting_at(&mut self, n_blocks: usize, start: usize) -> bool {
        let ref from_space = self.spaces[self.from];

        if start + n_blocks > from_space.len() {
            // if we have fewer blocks than required, it is impossible to find required blocks
            false
401
        } else {
402 403 404 405 406
            // we need to check if next n_blocks are contiguous
            // e.g. we have 10 blocks, and we want to check if we have 3 contiguous blocks from #7
            // we need to check if 7&8, 8&9 (cursor is 7, and 8)
            let mut cursor = start;
            while cursor < start + n_blocks - 1 {
407
                if from_space[cursor].payload + BLOCK_SIZE != from_space[cursor + 1].payload {
408 409 410 411 412 413 414
                    return false;
                }

                cursor += 1;
            }

            true
415
        }
qinsoon's avatar
qinsoon committed
416
    }
417

418
    fn zeroing_blocks(start: Address, n_blocks: usize) {
419 420 421
        use utils::mem::memsec;

        unsafe {
422
            memsec::memzero(start.to_ptr_mut::<u8>(), BLOCK_SIZE * n_blocks);
423 424
        }
    }
qinsoon's avatar
qinsoon committed
425 426
}

427 428
impl fmt::Display for Treadmill {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
429
        write!(f, "next: {}\n", self.from_space_next).unwrap();
430 431 432 433
        write!(f, "from:").unwrap();
        for i in 0..self.spaces[self.from].len() {
            write!(f, "{}->", self.spaces[self.from][i]).unwrap();
        }
434
        write!(f, "\n").unwrap();
435 436 437
        write!(f, "to:").unwrap();
        for i in 0..self.spaces[self.to].len() {
            write!(f, "{}->", self.spaces[self.to][i]).unwrap();
438
        }
439
        Ok(())
440 441 442 443
    }
}

/// One BLOCK_SIZE block in the treadmill.
struct TreadmillNode {
    // start address of the block this node represents
    payload: Address
}

impl TreadmillNode {
    // Wraps a block's start address in a node.
    fn new(addr: Address) -> TreadmillNode {
        TreadmillNode { payload: addr }
    }
}
452

453

454 455 456
impl fmt::Display for TreadmillNode {
    // Prints the node's block address as "[addr]".
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "[{}]", self.payload)
    }
}

#[cfg(test)]
mod tests {
    use super::*;
    use super::BLOCK_SIZE;

    // Construction smoke test: the space can be created and displayed.
    #[test]
    fn test_new_treadmill_space() {
        let space = FreeListSpace::new(BLOCK_SIZE * 10);

        println!("{}", space);
    }

    // Sub-block allocations: each request rounds up to whole blocks, so all
    // 10 allocations should succeed in a 20-block space (from-space holds
    // half the blocks).
    #[test]
    fn test_treadmill_alloc() {
        let space = FreeListSpace::new(BLOCK_SIZE * 20);

        for i in 0..10 {
            let ret = space.alloc(BLOCK_SIZE / 2, 8);
            println!("Allocation{}: {}", i, ret);
            assert!(!ret.is_zero());
        }
    }

    // Allocations spanning more than one block (1.5 blocks rounds up to 2).
    #[test]
    fn test_treadmill_alloc_spanblock() {
        let space = FreeListSpace::new(BLOCK_SIZE * 20);

        for i in 0..5 {
            let ret = space.alloc(BLOCK_SIZE + BLOCK_SIZE / 2, 8);
            println!("Allocation{}: {}", i, ret);
            assert!(!ret.is_zero());
        }
    }

    // NOTE(review): despite its name, this test only allocates and never
    // calls space.sweep(); exercising sweep would require mark state to be
    // set up via objectmodel first — TODO confirm and extend.
    #[test]
    fn test_treadmill_sweep() {
        let space = FreeListSpace::new(BLOCK_SIZE * 20);

        for i in 0..5 {
            let ret = space.alloc(BLOCK_SIZE + BLOCK_SIZE / 2, 8);
            println!("Allocation{}: {}", i, ret);
            assert!(!ret.is_zero());
        }
    }
}