GitLab will be upgraded on 31 Jan 2023 from 2.00 pm (AEDT) to 3.00 pm (AEDT). During the update, GitLab and Mattermost services will not be available. If you have any concerns with this, please talk to us at N110 (b) CSIT building.

mod.rs 25.3 KB
Newer Older
Isaac Oscar Gariano's avatar
Isaac Oscar Gariano committed
1
// Copyright 2017 The Australian National University
qinsoon's avatar
qinsoon committed
2
//
Isaac Oscar Gariano's avatar
Isaac Oscar Gariano committed
3
4
5
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
qinsoon's avatar
qinsoon committed
6
//
Isaac Oscar Gariano's avatar
Isaac Oscar Gariano committed
7
//     http://www.apache.org/licenses/LICENSE-2.0
qinsoon's avatar
qinsoon committed
8
//
Isaac Oscar Gariano's avatar
Isaac Oscar Gariano committed
9
10
11
12
13
14
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

qinsoon's avatar
qinsoon committed
15
16
17
18
19
20
use heap::immix::MUTATORS;
use heap::immix::N_MUTATORS;
use heap::immix::ImmixMutatorLocal;
use heap::immix::ImmixSpace;
use heap::freelist::FreeListSpace;
use objectmodel;
21
use common::gctype::*;
22
use heap::Space;
qinsoon's avatar
qinsoon committed
23
use MY_GC;
qinsoon's avatar
qinsoon committed
24
25

use utils::{Address, ObjectReference};
qinsoon's avatar
qinsoon committed
26
use utils::POINTER_SIZE;
27

28
use std::sync::atomic::{AtomicIsize, AtomicBool, Ordering};
29
30
use std::sync::{Arc, Mutex, Condvar, RwLock};

qinsoon's avatar
qinsoon committed
31
use crossbeam::sync::chase_lev::*;
32
33
34
35
36
37
38
39
40
41
use std::sync::mpsc;
use std::sync::mpsc::channel;
use std::thread;

use std::sync::atomic;

lazy_static! {
    static ref STW_COND : Arc<(Mutex<usize>, Condvar)> = {
        Arc::new((Mutex::new(0), Condvar::new()))
    };
qinsoon's avatar
qinsoon committed
42

43
44
45
    static ref ROOTS : RwLock<Vec<ObjectReference>> = RwLock::new(vec![]);
}

qinsoon's avatar
qinsoon committed
46
pub static ENABLE_GC: AtomicBool = atomic::ATOMIC_BOOL_INIT;
47

qinsoon's avatar
qinsoon committed
48
49
static CONTROLLER: AtomicIsize = atomic::ATOMIC_ISIZE_INIT;
const NO_CONTROLLER: isize = -1;
50

qinsoon's avatar
qinsoon committed
51
pub fn init(n_gcthreads: usize) {
52
    CONTROLLER.store(NO_CONTROLLER, Ordering::SeqCst);
qinsoon's avatar
qinsoon committed
53
54

    GC_THREADS.store(n_gcthreads, Ordering::SeqCst);
55
56
57
58
}

pub fn trigger_gc() {
    trace!("Triggering GC...");
qinsoon's avatar
qinsoon committed
59

60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
    for mut m in MUTATORS.write().unwrap().iter_mut() {
        if m.is_some() {
            m.as_mut().unwrap().set_take_yield(true);
        }
    }
}

// Thread-context helpers implemented in the C support library (x86_64 build).
// Used by stack_scan() to delimit and read the conservative scan region.
#[cfg(target_arch = "x86_64")]
#[link(name = "gc_clib_x64")]
extern "C" {
    // Current stack pointer of the calling thread (start of the scan).
    fn immmix_get_stack_ptr() -> Address;
    // Records the scan limit ("low water mark") for the calling thread.
    pub fn set_low_water_mark();
    // Previously recorded low water mark (end of the scan).
    fn get_low_water_mark() -> Address;
    // Pointer to a saved register snapshot; length via get_registers_count().
    // NOTE(review): presumably captured at the last yieldpoint — confirm in gc_clib.
    fn get_registers() -> *const Address;
    // Number of entries in the array returned by get_registers().
    fn get_registers_count() -> i32;
}

77
78
79
80
81
82
83
84
85
86
// Thread-context helpers implemented in the C support library (aarch64 build).
// Same contract as the x86_64 declarations above; see stack_scan() for usage.
#[cfg(target_arch = "aarch64")]
#[link(name = "gc_clib_aarch64")]
extern "C" {
    // Current stack pointer of the calling thread (start of the scan).
    fn immmix_get_stack_ptr() -> Address;
    // Records the scan limit ("low water mark") for the calling thread.
    pub fn set_low_water_mark();
    // Previously recorded low water mark (end of the scan).
    fn get_low_water_mark() -> Address;
    // Pointer to a saved register snapshot; length via get_registers_count().
    fn get_registers() -> *const Address;
    // Number of entries in the array returned by get_registers().
    fn get_registers_count() -> i32;
}

87
pub fn stack_scan() -> Vec<ObjectReference> {
88
    trace!("stack scanning...");
qinsoon's avatar
qinsoon committed
89
90
    let stack_ptr: Address = unsafe { immmix_get_stack_ptr() };

91
92
93
    if cfg!(debug_assertions) {
        if !stack_ptr.is_aligned_to(8) {
            use std::process;
qinsoon's avatar
qinsoon committed
94
95
96
97
98
            println!(
                "trying to scanning stack, however the current stack pointer is 0x{:x}, \
                 which is not aligned to 8bytes",
                stack_ptr
            );
99
100
101
            process::exit(102);
        }
    }
qinsoon's avatar
qinsoon committed
102
103
104

    let low_water_mark: Address = unsafe { get_low_water_mark() };

105
106
    let mut cursor = stack_ptr;
    let mut ret = vec![];
107

qinsoon's avatar
qinsoon committed
108
109
110
111
112
    let gccontext_guard = MY_GC.read().unwrap();
    let gccontext = gccontext_guard.as_ref().unwrap();

    let immix_space = gccontext.immix_space.clone();
    let lo_space = gccontext.lo_space.clone();
qinsoon's avatar
qinsoon committed
113

114
    while cursor < low_water_mark {
qinsoon's avatar
qinsoon committed
115
116
        let value: Address = unsafe { cursor.load::<Address>() };

117
        if immix_space.is_valid_object(value) || lo_space.is_valid_object(value) {
qinsoon's avatar
qinsoon committed
118
            ret.push(unsafe { value.to_object_reference() });
119
        }
qinsoon's avatar
qinsoon committed
120

121
        cursor = cursor + POINTER_SIZE;
122
    }
qinsoon's avatar
qinsoon committed
123

124
    let roots_from_stack = ret.len();
qinsoon's avatar
qinsoon committed
125
126
127
128

    let registers_count = unsafe { get_registers_count() };
    let registers = unsafe { get_registers() };

129
    for i in 0..registers_count {
qinsoon's avatar
qinsoon committed
130
131
132
133
        let value = unsafe { *registers.offset(i as isize) };

        if immix_space.is_valid_object(value) || lo_space.is_valid_object(value) {
            ret.push(unsafe { value.to_object_reference() });
134
135
        }
    }
qinsoon's avatar
qinsoon committed
136

137
    let roots_from_registers = ret.len() - roots_from_stack;
qinsoon's avatar
qinsoon committed
138
139
140
141
142
143
144

    trace!(
        "roots: {} from stack, {} from registers",
        roots_from_stack,
        roots_from_registers
    );

145
146
147
148
149
150
    ret
}

#[inline(never)]
pub fn sync_barrier(mutator: &mut ImmixMutatorLocal) {
    let controller_id = CONTROLLER.compare_and_swap(-1, mutator.id() as isize, Ordering::SeqCst);
qinsoon's avatar
qinsoon committed
151
152
153
154
155
156
157

    trace!(
        "Mutator{} saw the controller is {}",
        mutator.id(),
        controller_id
    );

158
159
    // prepare the mutator for gc - return current block (if it has)
    mutator.prepare_for_gc();
qinsoon's avatar
qinsoon committed
160

161
    // user thread call back to prepare for gc
qinsoon's avatar
qinsoon committed
162
163
    //    USER_THREAD_PREPARE_FOR_GC.read().unwrap()();

164
    if controller_id != NO_CONTROLLER {
165
166
167
168
169
170
        // scan its stack
        {
            let mut thread_roots = stack_scan();
            ROOTS.write().unwrap().append(&mut thread_roots);
        }

171
172
        // this thread will block
        block_current_thread(mutator);
qinsoon's avatar
qinsoon committed
173

174
        // reset current mutator
175
        mutator.reset_after_gc();
176
177
178
    } else {
        // this thread is controller
        // other threads should block
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195

        // init roots
        {
            let mut roots = ROOTS.write().unwrap();
            // clear existing roots (roots from last gc)
            roots.clear();

            // add explicity roots
            let gc = MY_GC.read().unwrap();
            for objref in gc.as_ref().unwrap().roots.iter() {
                roots.push(*objref);
            }

            // scan its stack
            let mut thread_roots = stack_scan();
            roots.append(&mut thread_roots);
        }
qinsoon's avatar
qinsoon committed
196

197
198
199
        // wait for all mutators to be blocked
        let &(ref lock, ref cvar) = &*STW_COND.clone();
        let mut count = 0;
qinsoon's avatar
qinsoon committed
200
201
202
203
204

        trace!(
            "expect {} mutators to park",
            *N_MUTATORS.read().unwrap() - 1
        );
205
        while count < *N_MUTATORS.read().unwrap() - 1 {
qinsoon's avatar
qinsoon committed
206
            let new_count = { *lock.lock().unwrap() };
207
            if new_count != count {
208
209
210
211
                count = new_count;
                trace!("count = {}", count);
            }
        }
qinsoon's avatar
qinsoon committed
212

213
        trace!("everyone stopped, gc will start");
qinsoon's avatar
qinsoon committed
214

215
216
        // roots->trace->sweep
        gc();
qinsoon's avatar
qinsoon committed
217

218
219
220
221
222
223
224
225
226
227
        // mutators will resume
        CONTROLLER.store(NO_CONTROLLER, Ordering::SeqCst);
        for mut t in MUTATORS.write().unwrap().iter_mut() {
            if t.is_some() {
                let t_mut = t.as_mut().unwrap();
                t_mut.set_take_yield(false);
                t_mut.set_still_blocked(false);
            }
        }
        // every mutator thread will reset themselves, so only reset current mutator here
228
        mutator.reset_after_gc();
229
230
231
232
233
234
235
236
237
238
239
240

        // resume
        {
            let mut count = lock.lock().unwrap();
            *count = 0;
            cvar.notify_all();
        }
    }
}

fn block_current_thread(mutator: &mut ImmixMutatorLocal) {
    trace!("Mutator{} blocked", mutator.id());
qinsoon's avatar
qinsoon committed
241

242
243
244
    let &(ref lock, ref cvar) = &*STW_COND.clone();
    let mut count = lock.lock().unwrap();
    *count += 1;
qinsoon's avatar
qinsoon committed
245

246
    mutator.global.set_still_blocked(true);
qinsoon's avatar
qinsoon committed
247

248
249
250
    while mutator.global.is_still_blocked() {
        count = cvar.wait(count).unwrap();
    }
qinsoon's avatar
qinsoon committed
251

252
253
254
    trace!("Mutator{} unblocked", mutator.id());
}

qinsoon's avatar
qinsoon committed
255
/// Number of collections performed since startup; incremented once per `gc()` call.
pub static GC_COUNT: atomic::AtomicUsize = atomic::ATOMIC_USIZE_INIT;
256
257

fn gc() {
qinsoon's avatar
qinsoon committed
258
    if !ENABLE_GC.load(Ordering::SeqCst) {
259
260
261
        panic!("Triggering GC when GC is disabled");
    }

qinsoon's avatar
qinsoon committed
262
263
    GC_COUNT.store(
        GC_COUNT.load(atomic::Ordering::SeqCst) + 1,
264
        atomic::Ordering::SeqCst
qinsoon's avatar
qinsoon committed
265
266
    );

267
    trace!("GC starts");
qinsoon's avatar
qinsoon committed
268

269
    // creates root deque
qinsoon's avatar
qinsoon committed
270
    let mut roots: &mut Vec<ObjectReference> = &mut ROOTS.write().unwrap();
271
    trace!("total roots: {}", roots.len());
qinsoon's avatar
qinsoon committed
272

273
274
    // mark & trace
    {
qinsoon's avatar
qinsoon committed
275
276
277
        let gccontext_guard = MY_GC.read().unwrap();
        let gccontext = gccontext_guard.as_ref().unwrap();
        let (immix_space, lo_space) = (&gccontext.immix_space, &gccontext.lo_space);
qinsoon's avatar
qinsoon committed
278

279
280
        start_trace(&mut roots, immix_space.clone(), lo_space.clone());
    }
qinsoon's avatar
qinsoon committed
281

282
    trace!("trace done");
qinsoon's avatar
qinsoon committed
283

284
285
    // sweep
    {
qinsoon's avatar
qinsoon committed
286
287
        let gccontext_guard = MY_GC.read().unwrap();
        let gccontext = gccontext_guard.as_ref().unwrap();
288

qinsoon's avatar
qinsoon committed
289
        let ref immix_space = gccontext.immix_space;
290
        immix_space.sweep();
291

qinsoon's avatar
qinsoon committed
292
        let ref lo_space = gccontext.lo_space;
293
        lo_space.sweep();
294
    }
qinsoon's avatar
qinsoon committed
295

296
297
298
299
    objectmodel::flip_mark_state();
    trace!("GC finishes");
}

qinsoon's avatar
qinsoon committed
300
301
/// When a tracer's local queue grows past this length, new edges are sent
/// back through the job channel instead of being kept locally
/// (see `steal_process_edge`).
pub const PUSH_BACK_THRESHOLD: usize = 50;
/// Number of parallel tracing threads spawned per collection; set in `init`.
pub static GC_THREADS: atomic::AtomicUsize = atomic::ATOMIC_USIZE_INIT;
302
303
304

#[allow(unused_variables)]
#[inline(never)]
qinsoon's avatar
qinsoon committed
305
306
307
pub fn start_trace(
    work_stack: &mut Vec<ObjectReference>,
    immix_space: Arc<ImmixSpace>,
308
    lo_space: Arc<FreeListSpace>
qinsoon's avatar
qinsoon committed
309
) {
310
    // creates root deque
311
    let (worker, stealer) = deque();
qinsoon's avatar
qinsoon committed
312

313
314
315
316
317
    while !work_stack.is_empty() {
        worker.push(work_stack.pop().unwrap());
    }

    loop {
qinsoon's avatar
qinsoon committed
318
319
        let (sender, receiver) = channel::<ObjectReference>();

320
321
322
323
324
325
326
327
328
329
330
        let mut gc_threads = vec![];
        for _ in 0..GC_THREADS.load(atomic::Ordering::SeqCst) {
            let new_immix_space = immix_space.clone();
            let new_lo_space = lo_space.clone();
            let new_stealer = stealer.clone();
            let new_sender = sender.clone();
            let t = thread::spawn(move || {
                start_steal_trace(new_stealer, new_sender, new_immix_space, new_lo_space);
            });
            gc_threads.push(t);
        }
qinsoon's avatar
qinsoon committed
331

332
333
        // only stealers own sender, when all stealers quit, the following loop finishes
        drop(sender);
qinsoon's avatar
qinsoon committed
334

335
336
337
338
        loop {
            let recv = receiver.recv();
            match recv {
                Ok(obj) => worker.push(obj),
339
                Err(_) => break
340
341
            }
        }
qinsoon's avatar
qinsoon committed
342

343
344
        match worker.try_pop() {
            Some(obj_ref) => worker.push(obj_ref),
345
            None => break
346
347
348
349
350
        }
    }
}

#[allow(unused_variables)]
qinsoon's avatar
qinsoon committed
351
352
353
354
fn start_steal_trace(
    stealer: Stealer<ObjectReference>,
    job_sender: mpsc::Sender<ObjectReference>,
    immix_space: Arc<ImmixSpace>,
355
    lo_space: Arc<FreeListSpace>
qinsoon's avatar
qinsoon committed
356
) {
357
    use objectmodel;
qinsoon's avatar
qinsoon committed
358

359
    let mut local_queue = vec![];
360
    let mark_state = objectmodel::load_mark_state();
qinsoon's avatar
qinsoon committed
361

362
363
364
365
366
367
368
369
370
    loop {
        let work = {
            if !local_queue.is_empty() {
                local_queue.pop().unwrap()
            } else {
                let work = stealer.steal();
                match work {
                    Steal::Empty => return,
                    Steal::Abort => continue,
371
                    Steal::Data(obj) => obj
372
373
374
                }
            }
        };
qinsoon's avatar
qinsoon committed
375
376
377
378
379
380
381

        steal_trace_object(
            work,
            &mut local_queue,
            &job_sender,
            mark_state,
            &immix_space,
382
            &lo_space
qinsoon's avatar
qinsoon committed
383
        );
384
    }
qinsoon's avatar
qinsoon committed
385
}
386
387

/// Traces one object (side-map object model): marks it in its owning space,
/// then decodes its ref bits and hands every reference field to
/// `steal_process_edge`.
#[inline(always)]
#[cfg(feature = "use-sidemap")]
pub fn steal_trace_object(
    obj: ObjectReference,
    local_queue: &mut Vec<ObjectReference>,
    job_sender: &mpsc::Sender<ObjectReference>,
    mark_state: u8,
    immix_space: &ImmixSpace,
    lo_space: &FreeListSpace
) {
    if cfg!(debug_assertions) {
        // check if this object is within the heap, if it is an object
        if !immix_space.is_valid_object(obj.to_address()) &&
            !lo_space.is_valid_object(obj.to_address())
        {
            use std::process;

            println!("trying to trace an object that is not valid");
            println!("address: 0x{:x}", obj);
            println!("---");
            println!("immix space: {}", immix_space);
            println!("lo space: {}", lo_space);

            println!("invalid object during tracing");
            process::exit(101);
        }
    }

    let addr = obj.to_address();

    // Mark the object in whichever space owns it, and keep that space's
    // alloc map/start so the ref bits below can be decoded.
    let (alloc_map, space_start) = if immix_space.addr_in_space(addr) {
        // mark object
        objectmodel::mark_as_traced(
            immix_space.trace_map(),
            immix_space.start(),
            obj,
            mark_state
        );

        // mark line
        immix_space.line_mark_table.mark_line_live(addr);

        (immix_space.alloc_map(), immix_space.start())
    } else if lo_space.addr_in_space(addr) {
        // mark object
        objectmodel::mark_as_traced(lo_space.trace_map(), lo_space.start(), obj, mark_state);
        trace!("mark object @ {} to {}", obj, mark_state);

        (lo_space.alloc_map(), lo_space.start())
    } else {
        println!("unexpected address: {}", addr);
        println!("immix space: {}", immix_space);
        println!("lo space   : {}", lo_space);

        panic!("error during tracing object")
    };

    // Decode one ref-bits byte per iteration; each byte describes
    // REF_BITS_LEN words starting at `base`.
    let mut base = addr;
    loop {
        // NOTE(review): get_ref_byte is keyed on `obj` every iteration, not
        // on `base` — confirm this is intended for non-short encodings.
        let value = objectmodel::get_ref_byte(alloc_map, space_start, obj);
        let (ref_bits, short_encode) = (
            bit_utils::lower_bits_u8(value, objectmodel::REF_BITS_LEN),
            bit_utils::test_nth_bit_u8(value, objectmodel::SHORT_ENCODE_BIT)
        );

        // Fix (duplication): the original spelled out one arm per pattern
        // with four near-identical call sites. Each known pattern is a run
        // of low bits, i.e. that many leading reference words.
        let n_refs = match ref_bits {
            0b0000_0000 => 0,
            0b0000_0001 => 1,
            0b0000_0011 => 2,
            0b0000_1111 => 4,
            _ => {
                error!("unexpected ref_bits patterns: {:b}", ref_bits);
                unimplemented!()
            }
        };

        for i in 0..n_refs {
            steal_process_edge(
                base,
                i * 8,
                local_queue,
                job_sender,
                mark_state,
                immix_space,
                lo_space
            );
        }

        if short_encode {
            return;
        } else {
            base = base.plus(objectmodel::REF_BITS_LEN * POINTER_SIZE);
        }
    }
}

#[inline(always)]
537
#[cfg(not(feature = "use-sidemap"))]
qinsoon's avatar
qinsoon committed
538
539
540
541
542
543
pub fn steal_trace_object(
    obj: ObjectReference,
    local_queue: &mut Vec<ObjectReference>,
    job_sender: &mpsc::Sender<ObjectReference>,
    mark_state: u8,
    immix_space: &ImmixSpace,
544
    lo_space: &FreeListSpace
qinsoon's avatar
qinsoon committed
545
) {
546
547
    if cfg!(debug_assertions) {
        // check if this object in within the heap, if it is an object
qinsoon's avatar
qinsoon committed
548
549
550
        if !immix_space.is_valid_object(obj.to_address()) &&
            !lo_space.is_valid_object(obj.to_address())
        {
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
            use std::process;

            println!("trying to trace an object that is not valid");
            println!("address: 0x{:x}", obj);
            println!("---");
            println!("immix space: {}", immix_space);
            println!("lo space: {}", lo_space);

            println!("invalid object during tracing");
            process::exit(101);
        }
    }

    let addr = obj.to_address();

    // mark object
    objectmodel::mark_as_traced(obj, mark_state);

    if immix_space.addr_in_space(addr) {
        // mark line
        immix_space.line_mark_table.mark_line_live(addr);
    } else if lo_space.addr_in_space(addr) {
        // do nothing
    } else {
        println!("unexpected address: {}", addr);
        println!("immix space: {}", immix_space);
        println!("lo space   : {}", lo_space);

        panic!("error during tracing object")
    }

582
583
584
    // this part of code has some duplication with code in objectdump
    // FIXME: remove the duplicate code - use 'Tracer' trait

qinsoon's avatar
qinsoon committed
585
    let hdr = unsafe { (addr + objectmodel::OBJECT_HEADER_OFFSET).load::<u64>() };
586
587
588
589
590
591
592
593

    if objectmodel::header_is_fix_size(hdr) {
        // fix sized type
        if objectmodel::header_has_ref_map(hdr) {
            // has ref map
            let ref_map = objectmodel::header_get_ref_map(hdr);

            match ref_map {
qinsoon's avatar
qinsoon committed
594
                0 => {}
595
                0b0000_0001 => {
qinsoon's avatar
qinsoon committed
596
597
598
599
600
601
602
                    steal_process_edge(
                        addr,
                        0,
                        local_queue,
                        job_sender,
                        mark_state,
                        immix_space,
603
                        lo_space
qinsoon's avatar
qinsoon committed
604
                    );
605
606
                }
                0b0000_0011 => {
qinsoon's avatar
qinsoon committed
607
608
609
610
611
612
613
                    steal_process_edge(
                        addr,
                        0,
                        local_queue,
                        job_sender,
                        mark_state,
                        immix_space,
614
                        lo_space
qinsoon's avatar
qinsoon committed
615
616
617
618
619
620
621
622
                    );
                    steal_process_edge(
                        addr,
                        8,
                        local_queue,
                        job_sender,
                        mark_state,
                        immix_space,
623
                        lo_space
qinsoon's avatar
qinsoon committed
624
625
                    );
                }
626
                0b0000_1111 => {
qinsoon's avatar
qinsoon committed
627
628
629
630
631
632
633
                    steal_process_edge(
                        addr,
                        0,
                        local_queue,
                        job_sender,
                        mark_state,
                        immix_space,
634
                        lo_space
qinsoon's avatar
qinsoon committed
635
636
637
638
639
640
641
642
                    );
                    steal_process_edge(
                        addr,
                        8,
                        local_queue,
                        job_sender,
                        mark_state,
                        immix_space,
643
                        lo_space
qinsoon's avatar
qinsoon committed
644
645
646
647
648
649
650
651
                    );
                    steal_process_edge(
                        addr,
                        16,
                        local_queue,
                        job_sender,
                        mark_state,
                        immix_space,
652
                        lo_space
qinsoon's avatar
qinsoon committed
653
654
655
656
657
658
659
660
                    );
                    steal_process_edge(
                        addr,
                        24,
                        local_queue,
                        job_sender,
                        mark_state,
                        immix_space,
661
                        lo_space
qinsoon's avatar
qinsoon committed
662
663
                    );
                }
664
                _ => {
665
666
667
668
                    warn!("ref bits fall into slow path: {:b}", ref_map);

                    let mut i = 0;
                    while i < objectmodel::REF_MAP_LENGTH {
qinsoon's avatar
qinsoon committed
669
                        let has_ref: bool = ((ref_map >> i) & 1) == 1;
670
671

                        if has_ref {
qinsoon's avatar
qinsoon committed
672
673
674
675
676
677
678
                            steal_process_edge(
                                addr,
                                i * POINTER_SIZE,
                                local_queue,
                                job_sender,
                                mark_state,
                                immix_space,
679
                                lo_space
qinsoon's avatar
qinsoon committed
680
                            );
681
682
683
684
                        }

                        i += 1;
                    }
685
686
687
688
                }
            }
        } else {
            // by type ID
689
690
691
            let gctype_id = objectmodel::header_get_gctype_id(hdr);

            let gc_lock = MY_GC.read().unwrap();
qinsoon's avatar
qinsoon committed
692
693
            let gctype: Arc<GCType> =
                gc_lock.as_ref().unwrap().gc_types[gctype_id as usize].clone();
694
695

            for offset in gctype.gen_ref_offsets() {
qinsoon's avatar
qinsoon committed
696
697
698
699
700
701
702
                steal_process_edge(
                    addr,
                    offset,
                    local_queue,
                    job_sender,
                    mark_state,
                    immix_space,
703
                    lo_space
qinsoon's avatar
qinsoon committed
704
                );
705
            }
706
707
708
        }
    } else {
        // hybrids
709
        let gctype_id = objectmodel::header_get_gctype_id(hdr);
qinsoon's avatar
qinsoon committed
710
        let var_length = objectmodel::header_get_hybrid_length(hdr);
711
712

        let gc_lock = MY_GC.read().unwrap();
qinsoon's avatar
qinsoon committed
713
        let gctype: Arc<GCType> = gc_lock.as_ref().unwrap().gc_types[gctype_id as usize].clone();
714

qinsoon's avatar
qinsoon committed
715
        for offset in gctype.gen_hybrid_ref_offsets(var_length) {
qinsoon's avatar
qinsoon committed
716
717
718
719
720
721
722
            steal_process_edge(
                addr,
                offset,
                local_queue,
                job_sender,
                mark_state,
                immix_space,
723
                lo_space
qinsoon's avatar
qinsoon committed
724
            );
725
        }
726
727
728
729
730
    }
}

/// Follows one reference field at `base + offset` (side-map object model).
/// A non-null, not-yet-traced target is queued locally, or pushed back to
/// the coordinator once the local queue exceeds `PUSH_BACK_THRESHOLD`.
#[inline(always)]
#[cfg(feature = "use-sidemap")]
pub fn steal_process_edge(
    base: Address,
    offset: usize,
    local_queue: &mut Vec<ObjectReference>,
    job_sender: &mpsc::Sender<ObjectReference>,
    mark_state: u8,
    immix_space: &ImmixSpace,
    lo_space: &FreeListSpace
) {
    let field_addr = base.plus(offset);
    let edge = unsafe { field_addr.load::<ObjectReference>() };

    if cfg!(debug_assertions) {
        use std::process;
        // check if this object is within the heap, if it is an object
        if !edge.to_address().is_zero() && !immix_space.is_valid_object(edge.to_address()) &&
            !lo_space.is_valid_object(edge.to_address())
        {
            println!("trying to follow an edge that is not a valid object");
            println!("edge address: 0x{:x} from 0x{:x}", edge, field_addr);
            println!("base address: 0x{:x}", base);
            println!("---");
            if immix_space.addr_in_space(base) {
                objectmodel::print_object(
                    base,
                    immix_space.start(),
                    immix_space.trace_map(),
                    immix_space.alloc_map()
                );
                println!("---");
                println!("immix space:{}", immix_space);
            } else if lo_space.addr_in_space(base) {
                objectmodel::print_object(
                    base,
                    lo_space.start(),
                    lo_space.trace_map(),
                    lo_space.alloc_map()
                );
                println!("---");
                println!("lo space:{}", lo_space);
            } else {
                println!("not in immix/lo space")
            }

            println!("invalid object during tracing");
            process::exit(101);
        }
    }

    let target = edge.to_address();
    if !target.is_zero() {
        // Decide once whether the target still needs tracing, using the
        // trace map of whichever space owns it.
        let needs_trace = if immix_space.addr_in_space(target) {
            !objectmodel::is_traced(
                immix_space.trace_map(),
                immix_space.start(),
                edge,
                mark_state
            )
        } else if lo_space.addr_in_space(target) {
            !objectmodel::is_traced(
                lo_space.trace_map(),
                lo_space.start(),
                edge,
                mark_state
            )
        } else {
            false
        };

        if needs_trace {
            if local_queue.len() >= PUSH_BACK_THRESHOLD {
                job_sender.send(edge).unwrap();
            } else {
                local_queue.push(edge);
            }
        }
    }
}

#[inline(always)]
#[cfg(not(feature = "use-sidemap"))]
qinsoon's avatar
qinsoon committed
811
812
813
814
815
816
817
pub fn steal_process_edge(
    base: Address,
    offset: usize,
    local_queue: &mut Vec<ObjectReference>,
    job_sender: &mpsc::Sender<ObjectReference>,
    mark_state: u8,
    immix_space: &ImmixSpace,
818
    lo_space: &FreeListSpace
qinsoon's avatar
qinsoon committed
819
) {
820
    let field_addr = base + offset;
qinsoon's avatar
qinsoon committed
821
    let edge = unsafe { field_addr.load::<ObjectReference>() };
822
823
824
825

    if cfg!(debug_assertions) {
        use std::process;
        // check if this object in within the heap, if it is an object
qinsoon's avatar
qinsoon committed
826
827
828
        if !edge.to_address().is_zero() && !immix_space.is_valid_object(edge.to_address()) &&
            !lo_space.is_valid_object(edge.to_address())
        {
829
830
831
832
833
834
835
836
837
838
839
840
841
842
843
844
845
846
847
848
849
850
851
852
853
854
855
856
857
858
859
            println!("trying to follow an edge that is not a valid object");
            println!("edge address: 0x{:x} from 0x{:x}", edge, field_addr);
            println!("base address: 0x{:x}", base);
            println!("---");
            if immix_space.addr_in_space(base) {
                objectmodel::print_object(base);
                objectmodel::print_object(edge.to_address());
                println!("---");
                println!("immix space:{}", immix_space);
            } else if lo_space.addr_in_space(base) {
                objectmodel::print_object(base);
                println!("---");
                println!("lo space:{}", lo_space);
            } else {
                println!("not in immix/lo space")
            }

            println!("invalid object during tracing");
            process::exit(101);
        }
    }

    if !edge.to_address().is_zero() {
        if !objectmodel::is_traced(edge, mark_state) {
            if local_queue.len() >= PUSH_BACK_THRESHOLD {
                job_sender.send(edge).unwrap();
            } else {
                local_queue.push(edge);
            }
        }
    }
860
}