treadmill.rs 14.4 KB
Newer Older
1
// Copyright 2017 The Australian National University
qinsoon's avatar
qinsoon committed
2
//
3 4 5
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
qinsoon's avatar
qinsoon committed
6
//
7
//     http://www.apache.org/licenses/LICENSE-2.0
qinsoon's avatar
qinsoon committed
8
//
9 10 11 12 13 14
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

15 16 17 18
#![allow(dead_code)]

use utils::Address;
use utils::mem::memmap;
19 20
use common::AddressMap;

21 22
use objectmodel;

23
use std::sync::Arc;
24 25
use std::fmt;
use std::sync::Mutex;
26

qinsoon's avatar
qinsoon committed
27 28
// alignment of the usable start of the space within the over-allocated mapping (512 KB)
const SPACE_ALIGN: usize = 1 << 19;
// allocation granularity of the treadmill: one node per block
const BLOCK_SIZE: usize = 1 << 12; // 4kb

// when true, emit detailed trace!() logging of treadmill operations
const TRACE_TREADMILL: bool = false;
31

32 33
#[repr(C)]
pub struct FreeListSpace {
    // inclusive start of the usable, SPACE_ALIGN-aligned address range
    start: Address,
    // exclusive end of the usable address range
    end: Address,

    // per-address allocation side map (Arc so it can be shared outside the space)
    pub alloc_map: Arc<AddressMap<u8>>,
    // per-address trace/mark side map (Arc so it can be shared outside the space)
    pub trace_map: Arc<AddressMap<u8>>,

    #[allow(dead_code)]
    // never read after construction; kept alive so the anonymous mapping
    // backing [start, end) is not unmapped
    mmap: memmap::MmapMut,

    // treadmill bookkeeping (the two semi-space block lists); the Mutex
    // serializes allocation and sweeping
    treadmill: Mutex<Treadmill>
}

impl FreeListSpace {
    pub fn new(space_size: usize) -> FreeListSpace {
qinsoon's avatar
qinsoon committed
48 49 50 51 52 53
        let mut anon_mmap: memmap::MmapMut =
            match memmap::MmapMut::map_anon(space_size + SPACE_ALIGN) {
                Ok(m) => m,
                Err(_) => panic!("failed to call mmap")
            };
        let start: Address = Address::from_ptr::<u8>(anon_mmap.as_mut_ptr()).align_up(SPACE_ALIGN);
qinsoon's avatar
qinsoon committed
54
        let end: Address = start + space_size;
55

56 57 58 59 60 61 62 63 64 65 66 67 68 69 70
        let trace_map = AddressMap::new(start, end);
        let alloc_map = AddressMap::new(start, end);
        if cfg!(debug_assertions) {
            trace_map.init_all(0);
            alloc_map.init_all(0);
        }

        let treadmill = Treadmill::new(start, end);

        FreeListSpace {
            start: start,
            end: end,
            alloc_map: Arc::new(alloc_map),
            trace_map: Arc::new(trace_map),
            mmap: anon_mmap,
71
            treadmill: Mutex::new(treadmill)
72 73 74 75 76 77 78
        }
    }

    pub fn alloc(&self, size: usize, align: usize) -> Address {
        // every block is 'BLOCK_SIZE' aligned, usually we do not need to align
        assert!(BLOCK_SIZE % align == 0);

79 80
        let size = size + objectmodel::OBJECT_HEADER_SIZE;

81 82 83 84 85 86
        let blocks_needed = if size % BLOCK_SIZE == 0 {
            size / BLOCK_SIZE
        } else {
            size / BLOCK_SIZE + 1
        };

87
        if TRACE_TREADMILL {
88 89
            trace!("---before allocation---");
            trace!("{}", self);
90
        }
91

92
        trace!("requiring {} bytes ({} blocks)", size, blocks_needed);
93 94 95 96 97
        let res = {
            let mut treadmill = self.treadmill.lock().unwrap();
            treadmill.alloc_blocks(blocks_needed)
        };

98
        if TRACE_TREADMILL {
99 100
            trace!("---after allocation---");
            trace!("{}", self);
101
        }
102

103 104 105
        if res.is_zero() {
            res
        } else {
106
            res + (-objectmodel::OBJECT_HEADER_OFFSET)
107
        }
108 109
    }

110 111
    #[inline(always)]
    #[cfg(feature = "use-sidemap")]
    #[allow(unused_variables)]
    /// Side-map variant of the traced check. Not implemented for this space:
    /// calling it panics via `unimplemented!()`.
    fn is_traced(&self, addr: Address, mark_state: u8) -> bool {
        unimplemented!()
    }

    #[inline(always)]
    #[cfg(not(feature = "use-sidemap"))]
    /// Returns whether the object at `addr` was traced (marked) in the current
    /// GC cycle, delegating to `objectmodel::is_traced` with the given
    /// `mark_state`.
    fn is_traced(&self, addr: Address, mark_state: u8) -> bool {
        objectmodel::is_traced(unsafe { addr.to_object_reference() }, mark_state)
    }

123
    /// Sweeps the treadmill: partitions from-space nodes into alive (moved to
    /// to-space) and free (left behind), flips the two semi-spaces, and sorts
    /// the tail of the new from-space by address so contiguous runs can be
    /// found by the allocator. Panics if the sweep reclaims nothing and
    /// to-space is empty (out of memory).
    pub fn sweep(&self) {
        trace!("going to sweep treadmill space");
        if TRACE_TREADMILL {
            trace!("{}", self);
        }

        let mut nodes_scanned = 0;
        let mut free_nodes_scanned = 0;
        let mut alive_nodes_scanned = 0;

        // lock held for the whole sweep: no allocation can interleave
        let mut treadmill = self.treadmill.lock().unwrap();

        {
            let mark_state = objectmodel::load_mark_state();

            let from = treadmill.from;
            let to = treadmill.to;

            // scan every node that was in from-space at the start; alive nodes
            // are removed in place, so `i` only advances past free nodes and
            // `nodes_scanned` (not `i`) bounds the loop
            let total_nodes = treadmill.spaces[from].len();
            let mut i = 0;
            while nodes_scanned < total_nodes {
                trace!("scanning {}", treadmill.spaces[from][i]);
                let addr = treadmill.spaces[from][i].payload;

                nodes_scanned += 1;

                let traced = self.is_traced(addr, mark_state);

                if traced {
                    // this object is alive
                    alive_nodes_scanned += 1;

                    // move to tospace
                    let node = treadmill.spaces[from].remove(i);
                    treadmill.spaces[to].push(node);

                    trace!("is alive");

                // do not increment i
                } else {
                    free_nodes_scanned += 1;
                    i += 1;
                }
            }

            // check if we have any free nodes
            if free_nodes_scanned == 0 && treadmill.spaces[treadmill.to].len() == 0 {
                println!("didnt free up any memory in treadmill space");
                panic!("we ran out of memory in large object space")
            }
        }

        // next allocation in to_space will starts from alive_nodes_scanned
        // NOTE(review): this assumes the first `alive_nodes_scanned` entries of
        // the post-flip from-space are the live ones -- confirm this invariant
        // holds when to-space was non-empty before the sweep
        treadmill.from_space_next = alive_nodes_scanned;

        // flip
        if treadmill.from == 0 {
            treadmill.from = 1;
            treadmill.to = 0;
        } else {
            treadmill.from = 0;
            treadmill.to = 1;
        }

        // sort from_space from from_space_next so contiguous blocks are together
        // (easier to allocate)
        let from = treadmill.from;
        let ref mut from_space = treadmill.spaces[from];

        // we do not care about alive nodes in from space
        // selection sort by payload address over the allocatable tail
        for start in alive_nodes_scanned..from_space.len() {
            let first = {
                let mut ret = start;
                for i in start..from_space.len() {
                    if from_space[i].payload < from_space[ret].payload {
                        ret = i;
                    }
                }
                ret
            };

            if first != start {
                let block = from_space.remove(first);
                from_space.insert(start, block);
            }
        }

        if cfg!(debug_assertions) {
            debug!("---tread mill space---");
            debug!("total nodes scanned: {}", nodes_scanned);
            debug!("alive nodes scanned: {}", alive_nodes_scanned);
            debug!("free  nodes scanned: {}", free_nodes_scanned);
        }
    }
}

219 220 221 222 223 224 225 226 227 228 229 230 231 232
use heap::Space;

impl Space for FreeListSpace {
    #[inline(always)]
    fn start(&self) -> Address {
        self.start
    }

    #[inline(always)]
    fn end(&self) -> Address {
        self.end
    }

    #[inline(always)]
    // NOTE(review): always answers `true` -- no per-object check is performed
    // for this space, so callers must only pass addresses they already know
    // belong here; confirm that is the callers' contract
    fn is_valid_object(&self, addr: Address) -> bool {
        true
    }
}

// SAFETY(review): mutable treadmill state is behind a Mutex and the side maps
// are Arc-shared; presumed sound to send/share across threads -- confirm the
// raw mapping pointer is never aliased mutably without the lock.
unsafe impl Sync for FreeListSpace {}
unsafe impl Send for FreeListSpace {}

impl fmt::Display for FreeListSpace {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "FreeListSpace\n").unwrap();
        write!(f, "range={:#X} ~ {:#X}\n", self.start, self.end).unwrap();

qinsoon's avatar
qinsoon committed
246
        let treadmill: &Treadmill = &self.treadmill.lock().unwrap();
247 248
        write!(f, "treadmill: {}", treadmill)
    }
249 250
}

qinsoon's avatar
qinsoon committed
251 252
// Treadmill bookkeeping: two semi-spaces of block nodes plus an allocation
// cursor into the current from-space.
struct Treadmill {
    from_space_next: usize, // next available node in from_space
    from: usize, // index (0 or 1) into `spaces` of the current from-space
    to: usize, // index (0 or 1) into `spaces` of the current to-space
    spaces: [Vec<TreadmillNode>; 2] // the two semi-space block lists
}

impl Treadmill {
    fn new(start: Address, end: Address) -> Treadmill {
260
        let half_space = start + ((end - start) / 2);
261

262
        let mut from_space = vec![];
qinsoon's avatar
qinsoon committed
263
        let mut to_space = vec![];
264

265 266 267
        let mut addr = start;

        while addr < half_space {
268
            from_space.push(TreadmillNode::new(addr));
269
            addr = addr + BLOCK_SIZE;
270
        }
271 272

        while addr < end {
273
            to_space.push(TreadmillNode::new(addr));
274
            addr = addr + BLOCK_SIZE;
275 276
        }

277
        Treadmill {
278 279
            from_space_next: 0,
            from: 0,
qinsoon's avatar
qinsoon committed
280
            to: 1,
281
            spaces: [from_space, to_space]
282 283 284 285
        }
    }

    /// Allocates `n_blocks` contiguous blocks from from-space and returns the
    /// address of the first one (zeroed), or a zero address on failure.
    ///
    /// Allocated nodes are kept packed at the front of from-space: if the run
    /// found is not already at `from_space_next`, its nodes are moved there.
    fn alloc_blocks(&mut self, n_blocks: usize) -> Address {
        match self.find_contiguous_blocks(n_blocks) {
            Some(start) => {
                if TRACE_TREADMILL {
                    trace!(
                        "found contiguous {} blocks, starting from {}",
                        n_blocks,
                        start
                    );
                }

                let ref mut from_space = self.spaces[self.from];

                // zero blocks
                let return_address = from_space[start].payload;
                Treadmill::zeroing_blocks(return_address, n_blocks);

                if start != self.from_space_next {
                    // contiguous blocks are not next few ones
                    // we need to move the blocks

                    // take the allocated blocks out
                    // (removing at `start` repeatedly pops the whole run, since
                    // later nodes shift left after each remove)
                    let new_allocated = {
                        let mut ret = vec![];

                        for _ in 0..n_blocks {
                            let block = from_space.remove(start);

                            if TRACE_TREADMILL {
                                trace!("remove allocated block from from_space: {}", block);
                                trace!("from space: ");
                                for i in 0..from_space.len() {
                                    trace!("{}", from_space[i]);
                                }
                            }

                            ret.push(block);
                        }

                        ret
                    };

                    // insert back and mov cursor
                    let mut cursor = self.from_space_next;
                    for block in new_allocated {
                        if TRACE_TREADMILL {
                            trace!("insert block {} to from_space at {}", block, cursor);
                        }

                        from_space.insert(cursor, block);

                        if TRACE_TREADMILL {
                            trace!("from space: ");
                            for i in 0..from_space.len() {
                                trace!("{}", from_space[i]);
                            }
                        }

                        cursor += 1;
                    }
                    self.from_space_next = cursor;
                } else {
                    // just move cursor
                    self.from_space_next += n_blocks;
                }

                return_address
            }
            None => {
                if TRACE_TREADMILL {
                    trace!("cannot find {} contiguous blocks", n_blocks);
                }
                // zero address signals allocation failure to the caller
                unsafe { Address::zero() }
            }
        }
    }
361

362 363
    fn find_contiguous_blocks(&mut self, n_blocks: usize) -> Option<usize> {
        // e.g. we have 10 blocks, and we require 3 blocks,
qinsoon's avatar
qinsoon committed
364 365
        // we wont be able to find contiguous blocks if starting block is more than 7
        // (only 8, 9 might be available)
366 367 368 369 370 371 372 373 374 375
        // 7 = 10 (total) - 3 (required)
        // Rust range is exclusive of the end, so we use (total - required + 1)

        // we can always assume contiguous blocks are arrange next to each other
        // since we will do a sort after GC

        // we do not have enough blocks (no need to check if they are contiguous
        if self.from_space_next + n_blocks > self.spaces[self.from].len() {
            return None;
        }
376

377 378 379 380 381 382 383 384 385 386 387 388 389 390 391
        for i in self.from_space_next..(self.spaces[self.from].len() - n_blocks + 1) {
            if self.has_contiguous_blocks_starting_at(n_blocks, i) {
                return Some(i);
            }
        }

        None
    }

    fn has_contiguous_blocks_starting_at(&mut self, n_blocks: usize, start: usize) -> bool {
        let ref from_space = self.spaces[self.from];

        if start + n_blocks > from_space.len() {
            // if we have fewer blocks than required, it is impossible to find required blocks
            false
392
        } else {
393 394 395 396 397
            // we need to check if next n_blocks are contiguous
            // e.g. we have 10 blocks, and we want to check if we have 3 contiguous blocks from #7
            // we need to check if 7&8, 8&9 (cursor is 7, and 8)
            let mut cursor = start;
            while cursor < start + n_blocks - 1 {
398
                if from_space[cursor].payload + BLOCK_SIZE != from_space[cursor + 1].payload {
399 400 401 402 403 404 405
                    return false;
                }

                cursor += 1;
            }

            true
406
        }
407
    }
408

409
    /// Zeroes the memory of `n_blocks` consecutive blocks beginning at `start`.
    fn zeroing_blocks(start: Address, n_blocks: usize) {
        use utils::mem::memsec;

        // SAFETY: callers pass the payload address of a run of `n_blocks`
        // blocks owned by this treadmill, so the `BLOCK_SIZE * n_blocks`
        // byte range is valid, mapped memory
        unsafe {
            memsec::memzero(start.to_ptr_mut::<u8>(), BLOCK_SIZE * n_blocks);
        }
    }
416 417
}

418 419
impl fmt::Display for Treadmill {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
420
        write!(f, "next: {}\n", self.from_space_next).unwrap();
421 422 423 424
        write!(f, "from:").unwrap();
        for i in 0..self.spaces[self.from].len() {
            write!(f, "{}->", self.spaces[self.from][i]).unwrap();
        }
425
        write!(f, "\n").unwrap();
426 427 428
        write!(f, "to:").unwrap();
        for i in 0..self.spaces[self.to].len() {
            write!(f, "{}->", self.spaces[self.to][i]).unwrap();
429
        }
430
        Ok(())
431 432 433 434
    }
}

// One treadmill node, representing a single `BLOCK_SIZE` block of memory.
struct TreadmillNode {
    payload: Address // start address of the block
}

impl TreadmillNode {
    /// Wraps the block starting at `addr` in a node.
    fn new(addr: Address) -> TreadmillNode {
        TreadmillNode { payload: addr }
    }
}
443

444

445 446 447
impl fmt::Display for TreadmillNode {
    /// Formats the node as `[<block start address>]`.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "[{}]", self.payload)
    }
}

#[cfg(test)]
mod tests {
    use super::*;
    use super::BLOCK_SIZE;

    /// Space construction succeeds and the space can be displayed.
    #[test]
    fn test_new_treadmill_space() {
        let space = FreeListSpace::new(BLOCK_SIZE * 10);

        println!("{}", space);
    }

    /// Single-block allocations: each half-block object occupies one block.
    #[test]
    fn test_treadmill_alloc() {
        let space = FreeListSpace::new(BLOCK_SIZE * 20);

        for i in 0..10 {
            let ret = space.alloc(BLOCK_SIZE / 2, 8);
            println!("Allocation{}: {}", i, ret);
            assert!(!ret.is_zero());
        }
    }

    /// Multi-block allocations: a 1.5-block object needs two contiguous blocks.
    #[test]
    fn test_treadmill_alloc_spanblock() {
        let space = FreeListSpace::new(BLOCK_SIZE * 20);

        for i in 0..5 {
            let ret = space.alloc(BLOCK_SIZE + BLOCK_SIZE / 2, 8);
            println!("Allocation{}: {}", i, ret);
            assert!(!ret.is_zero());
        }
    }

    /// Fix: this test previously never called `sweep()` despite its name, so
    /// the sweep path was not exercised at all. With no object marked, a sweep
    /// must reclaim the allocated blocks without panicking (free nodes exist,
    /// so the out-of-memory panic cannot trigger).
    #[test]
    fn test_treadmill_sweep() {
        let space = FreeListSpace::new(BLOCK_SIZE * 20);

        for i in 0..5 {
            let ret = space.alloc(BLOCK_SIZE + BLOCK_SIZE / 2, 8);
            println!("Allocation{}: {}", i, ret);
            assert!(!ret.is_zero());
        }

        space.sweep();
    }
}