// immix_mutator.rs
use heap::immix;
use heap::immix::ImmixSpace;
use heap::immix::immix_space::ImmixBlock;
use heap::gc;
use objectmodel;
use utils::Address;

use std::*;
use std::sync::Arc;
use std::sync::RwLock;
use std::sync::atomic::{AtomicBool, Ordering};

const MAX_MUTATORS : usize = 1024;
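// Global mutator registry: one slot per mutator id, all initially None.
// new() claims the slot at index N_MUTATORS; destroy() clears its slot again,
// keeping the vector at MAX_MUTATORS entries throughout.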
lazy_static! {
    pub static ref MUTATORS : RwLock<Vec<Option<Arc<ImmixMutatorGlobal>>>> = {
        let mut ret = Vec::with_capacity(MAX_MUTATORS);
        for _ in 0..MAX_MUTATORS {
            ret.push(None);
        }
        RwLock::new(ret)
    };
    
    pub static ref N_MUTATORS : RwLock<usize> = RwLock::new(0);
}

const TRACE_ALLOC_FASTPATH : bool = true;

#[repr(C)]
pub struct ImmixMutatorLocal {
    id        : usize,
    
    // use raw pointer here instead of AddressMapTable
    // to avoid indirection in fast path    
    alloc_map : *mut u8,
    trace_map : *mut u8,
    space_start: Address,
    
    // the cursor might be invalid, but Option<Address> is too expensive here;
    // after every GC we set both cursor and limit to Address::zero()
    // so that alloc() will branch to the slow path
    cursor    : Address,
    limit     : Address,
    line      : usize,
    
    // globally accessible per-thread fields
    pub global    : Arc<ImmixMutatorGlobal>,
    
    space     : Arc<ImmixSpace>,
    block     : Option<Box<ImmixBlock>>,

    mark_state: u8
}

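// Byte offsets of the bump-pointer fields. These are presumably consumed by
// compiled allocation fast paths that bump `cursor`/`limit` directly rather
// than calling alloc() (an assumption; this module only exports the offsets).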
lazy_static! {
    pub static ref CURSOR_OFFSET : usize = offset_of!(ImmixMutatorLocal=>cursor).get_byte_offset();
    pub static ref LIMIT_OFFSET  : usize = offset_of!(ImmixMutatorLocal=>limit).get_byte_offset();
}

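// Flags shared between a mutator thread and the GC (via the Arc stored in
// MUTATORS); both sides read and write them, hence the atomics.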
pub struct ImmixMutatorGlobal {
    take_yield : AtomicBool,
    still_blocked : AtomicBool
}

impl ImmixMutatorLocal {
    pub fn reset(&mut self) {
        unsafe {
            // Address::zero() should not be used outside initialization/reset;
            // here a zero cursor/limit forces alloc() onto the slow path
            self.cursor = Address::zero();
            self.limit = Address::zero();
        }
        self.line = immix::LINES_IN_BLOCK;
        
        self.block = None;
    }

    pub fn reset_after_gc(&mut self) {
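        // besides resetting the allocator state, flip the mark state so that
        // objects marked in the last cycle read as unmarked in the next one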
        self.reset();
        self.mark_state ^= 1;
    }
    
    pub fn new(space : Arc<ImmixSpace>) -> ImmixMutatorLocal {
        let global = Arc::new(ImmixMutatorGlobal::new());
        
        let mut id_lock = N_MUTATORS.write().unwrap();
        {
            let mut mutators_lock = MUTATORS.write().unwrap();
            mutators_lock.remove(*id_lock);
            mutators_lock.insert(*id_lock, Some(global.clone()));
        }
        
        let ret = ImmixMutatorLocal {
            id : *id_lock,
            cursor: unsafe { Address::zero() },
            limit : unsafe { Address::zero() },
            line  : immix::LINES_IN_BLOCK,
            block: None,
            alloc_map: space.alloc_map.ptr,
            trace_map: space.trace_map.ptr,
            space_start: space.start(),
            global: global,
            space: space,
            mark_state: objectmodel::INIT_MARK_STATE as u8
        };
        *id_lock += 1;
        
        ret
    }
    
    pub fn destroy(&mut self) {
        self.return_block();
        
        let mut mutator_count_lock = N_MUTATORS.write().unwrap();
        
        let mut mutators_lock = MUTATORS.write().unwrap();
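        // clear this mutator's slot: push a None and swap it into index self.id,
        // so the registry stays at MAX_MUTATORS entries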
        mutators_lock.push(None);
        mutators_lock.swap_remove(self.id);
        
        *mutator_count_lock -= 1;
        
        if cfg!(debug_assertions) {
            debug!("destroy mutator. Now live mutators = {}", *mutator_count_lock);
        }
    }
    
    #[inline(always)]
    pub fn yieldpoint(&mut self) {
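        // fast-path poll: take_yield is set (presumably by the GC controller in
        // heap::gc) when this mutator must stop at a safepoint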
        if self.global.take_yield() {
            self.yieldpoint_slow();
        }
    }
    
    #[inline(never)]
    pub fn yieldpoint_slow(&mut self) {
        trace!("Mutator{}: yieldpoint triggered, slow path", self.id);
        gc::sync_barrier(self);
    }
    
    #[inline(always)]
    pub fn alloc(&mut self, size: usize, align: usize) -> Address {
        // this part of the code slows down allocation
        // (alignment check + object-header padding)
        let align = objectmodel::check_alignment(align);
        let size = size + objectmodel::OBJECT_HEADER_SIZE;
        // end

        if TRACE_ALLOC_FASTPATH {
            trace!("Mutator{}: fastpath alloc: size={}, align={}", self.id, size, align);
        }

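        // bump-pointer fast path: align the cursor up and bump it by `size`;
        // only when the bump crosses `limit` do we take the slow path.
        // Illustrative numbers: with cursor = 0x1008 and align = 16,
        // start = 0x1010 and end = 0x1010 + size.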
        let start = self.cursor.align_up(align);
        let end = start.plus(size);

        if TRACE_ALLOC_FASTPATH {
            trace!("Mutator{}: fastpath alloc: start=0x{:x}, end=0x{:x}", self.id, start, end);
        }

        if end > self.limit {
            let ret = self.try_alloc_from_local(size, align);
            if TRACE_ALLOC_FASTPATH {
                trace!("Mutator{}: fastpath alloc: try_alloc_from_local()=0x{:x}", self.id, ret);
            }
            
            if cfg!(debug_assertions) {
                if !ret.is_aligned_to(align) {
                    use std::process;
                    println!("wrong alignment on 0x{:x}, expected align: {}", ret, align);
                    process::exit(102);
                }
            }

            // ideally this header-offset adjustment would be removed as well (for performance)
            ret.offset(-objectmodel::OBJECT_HEADER_OFFSET)
        } else {
            if cfg!(debug_assertions) {
                if !start.is_aligned_to(align) {
                    use std::process;
                    println!("wrong alignment on 0x{:x}, expected align: {}", start, align);
                    process::exit(102);
                }
            }
            self.cursor = end;
            
            start.offset(-objectmodel::OBJECT_HEADER_OFFSET)
        }
    }
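    // A minimal usage sketch of the allocation API (hypothetical caller;
    // `space` and `encode` are stand-ins, not defined in this file):
    //
    //     let mut m = ImmixMutatorLocal::new(space.clone());
    //     let obj = m.alloc(24, 8);    // bump-allocate 24 bytes, 8-byte aligned
    //     m.init_object(obj, encode);  // write the object's header word
    //     m.yieldpoint();              // poll for a pending GC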
    
    #[inline(always)]
    #[cfg(feature = "use-sidemap")]
    pub fn init_object(&mut self, addr: Address, encode: u64) {
//        unsafe {
//            *self.alloc_map.offset((addr.diff(self.space_start) >> LOG_POINTER_SIZE) as isize) = encode as u8;
//            objectmodel::mark_as_untraced(self.trace_map, self.space_start, addr, self.mark_state);
//        }

        unimplemented!()
    }
    #[inline(always)]
    #[cfg(not(feature = "use-sidemap"))]
    pub fn init_object(&mut self, addr: Address, encode: u64) {
        unsafe {
            addr.offset(objectmodel::OBJECT_HEADER_OFFSET).store(encode);
        }
    }

    #[inline(always)]
    #[cfg(feature = "use-sidemap")]
    pub fn init_hybrid(&mut self, addr: Address, encode: u64, len: u64) {
        unimplemented!()
    }
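    // Hybrid objects carry a variable-length part; the length is packed into
    // the header word via SHR_HYBRID_LENGTH / MASK_HYBRID_LENGTH before the store.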
    #[inline(always)]
    #[cfg(not(feature = "use-sidemap"))]
    pub fn init_hybrid(&mut self, addr: Address, encode: u64, len: u64) {
        let encode = encode | ((len << objectmodel::SHR_HYBRID_LENGTH) & objectmodel::MASK_HYBRID_LENGTH);
        unsafe {
            addr.offset(objectmodel::OBJECT_HEADER_OFFSET).store(encode);
        }
    }
    
    #[inline(never)]
    pub fn try_alloc_from_local(&mut self, size : usize, align: usize) -> Address {
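        // medium path: find a run of free lines in the current block's line
        // mark table, zero that run, and bump-allocate from it; if the block
        // has no usable lines left, fall through to a fresh block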
        if self.line < immix::LINES_IN_BLOCK {
            let opt_next_available_line = {
                let cur_line = self.line;
                self.block().get_next_available_line(cur_line)
            };
    
            match opt_next_available_line {
                Some(next_available_line) => {
                    // we can alloc from local blocks
                    let end_line = self.block().get_next_unavailable_line(next_available_line);

                    self.cursor = self.block().start().plus(next_available_line << immix::LOG_BYTES_IN_LINE);
                    self.limit  = self.block().start().plus(end_line << immix::LOG_BYTES_IN_LINE);
                    self.line   = end_line;
                    
                    self.cursor.memset(0, self.limit.diff(self.cursor));
                    
                    for line in next_available_line..end_line {
                        self.block().line_mark_table_mut().set(line, immix::LineMark::FreshAlloc);
                    }

                    // allocate fast path
                    let start = self.cursor.align_up(align);
                    let end = start.plus(size);

                    self.cursor = end;
                    start
                },
                None => {
                    self.alloc_from_global(size, align)
                }
            }
        } else {
            // we need to alloc from global space
            self.alloc_from_global(size, align)
        }
    }
    
    fn alloc_from_global(&mut self, size: usize, align: usize) -> Address {
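        // slow path: return the current block, then loop until the space hands
        // us a usable block; yieldpoint() is polled each iteration so this
        // mutator can block for a GC while the heap is exhausted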
        trace!("Mutator{}: slowpath: alloc_from_global", self.id);
        
        self.return_block();

        loop {
            // check if yield
            self.yieldpoint();
            
            let new_block : Option<Box<ImmixBlock>> = self.space.get_next_usable_block();
            
            match new_block {
                Some(b) => {
                    // no need to zero the whole block here;
                    // lines are zeroed as they are handed out in try_alloc_from_local()
//                    b.lazy_zeroing();

                    self.block    = Some(b);
                    self.cursor   = self.block().start();
                    self.limit    = self.block().start();
                    self.line     = 0;

                    trace!("Mutator{}: slowpath: new block starting from 0x{:x}", self.id, self.cursor);

                    return self.try_alloc_from_local(size, align);
                },
                None => { continue; }
            }
        }
    }
    
    pub fn prepare_for_gc(&mut self) {
        self.return_block();
    }
    
    pub fn id(&self) -> usize {
        self.id
    }

    fn return_block(&mut self) {
        if self.block.is_some() {
            trace!("finishing block {:?}", self.block.as_ref().unwrap());

            if cfg!(debug_assertions) {
                let block = self.block.as_ref().unwrap();
                ImmixMutatorLocal::sanity_check_finished_block(block);
            }

            self.space.return_used_block(self.block.take().unwrap());
        }        
    }

    #[cfg(feature = "use-sidemap")]
    #[allow(unused_variables)]
    fn sanity_check_finished_block(block: &ImmixBlock) {

    }

    #[cfg(not(feature = "use-sidemap"))]
    #[allow(unused_variables)]
    fn sanity_check_finished_block(block: &ImmixBlock) {

    }

    fn block(&mut self) -> &mut ImmixBlock {
        self.block.as_mut().unwrap()
    }
    
    pub fn print_object(&self, obj: Address, length: usize) {
        ImmixMutatorLocal::print_object_static(obj, length);
    }
    
    pub fn print_object_static(obj: Address, length: usize) {
        debug!("===Object {:#X} size: {} bytes===", obj, length);
        let mut cur_addr = obj;
        while cur_addr < obj.plus(length) {
            debug!("Address: {:#X}   {:#X}", cur_addr, unsafe {cur_addr.load::<u64>()});
            cur_addr = cur_addr.plus(8);
        }
        debug!("----");
        debug!("=========");
    }
}

impl ImmixMutatorGlobal {
    pub fn new() -> ImmixMutatorGlobal {
        ImmixMutatorGlobal {
            take_yield: AtomicBool::new(false),
            still_blocked: AtomicBool::new(false)
        }
    }
    
    #[inline(always)]
    pub fn is_still_blocked(&self) -> bool {
        self.still_blocked.load(Ordering::SeqCst)
    }
    pub fn set_still_blocked(&self, b : bool) {
        self.still_blocked.store(b, Ordering::SeqCst);
    }
    
    pub fn set_take_yield(&self, b : bool) {
        self.take_yield.store(b, Ordering::SeqCst);
    }
    #[inline(always)]
    pub fn take_yield(&self) -> bool {
        self.take_yield.load(Ordering::SeqCst)
    }
}

impl fmt::Display for ImmixMutatorLocal {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        if self.cursor.is_zero() {
            write!(f, "Mutator (not initialized)")
        } else {
            writeln!(f, "Mutator:")?;
            writeln!(f, "cursor= {:#X}", self.cursor)?;
            writeln!(f, "limit = {:#X}", self.limit)?;
            writeln!(f, "line  = {}", self.line)?;
            write!(f, "block = {}", self.block.as_ref().unwrap())
        }
    }
}