Commit 3f35c61b authored by qinsoon's avatar qinsoon

[wip] can dump a linked list as HeapDump

need to put HeapDump as data section in asm
parent 9967ebe7
use utils::Address;
use utils::ByteSize;
use utils::POINTER_SIZE;
use common::gctype::*;
use MY_GC;
use objectmodel;
use std::collections::HashMap;
use std::sync::Arc;
/// A self-contained snapshot of every heap object reachable from a set of
/// roots, built by `HeapDump::from_roots`. Intended to be emitted as a data
/// section in generated asm (each object gets a relocatable label).
pub struct HeapDump {
    // keyed by the object's reference address; value holds its memory
    // extent and the offsets of its outgoing reference fields
    pub objects: HashMap<Address, ObjectDump>,
    // reference address -> unique label ("GCDUMP_<n>_<addr>") assigned by
    // label_relocatable_refs(), used to relocate refs when the dump is loaded
    pub relocatable_refs: HashMap<Address, String>
}
/// Persisted metadata for a single heap object.
///
/// NOTE: `Debug` must NOT be derived here — this file provides a manual
/// `impl fmt::Debug for ObjectDump`, and deriving it as well is a
/// conflicting-implementations compile error (E0119). Only `Clone` is derived.
#[derive(Clone)]
pub struct ObjectDump {
    // address callers use to refer to the object (may differ from mem_start
    // when the header lives before the reference point)
    pub reference_addr: Address,
    // start of the object's raw memory, header included
    pub mem_start: Address,
    // total byte size of the memory region beginning at mem_start
    pub mem_size : ByteSize,
    // byte offsets (relative to reference_addr) of fields holding references
    pub reference_offsets: Vec<ByteSize>
}
impl HeapDump {
    /// Builds a heap dump by transitively tracing every object reachable from
    /// `roots`, then assigning each dumped object a relocatable label.
    pub fn from_roots(roots: Vec<Address>) -> HeapDump {
        let mut work_queue: Vec<Address> = roots;
        let mut heap: HeapDump = HeapDump {
            objects: HashMap::new(),
            relocatable_refs: HashMap::new()
        };

        // standard worklist trace: pop an address, persist it if unseen,
        // then enqueue the objects it references
        while let Some(obj) = work_queue.pop() {
            if !heap.objects.contains_key(&obj) {
                let obj_dump = heap.persist_object(obj);
                heap.objects.insert(obj, obj_dump);

                heap.keep_tracing(heap.objects.get(&obj).unwrap(), &mut work_queue);
            }
        }

        heap.label_relocatable_refs();

        heap
    }

    /// Records the memory extent and reference-field offsets of one object by
    /// decoding its header.
    fn persist_object(&self, obj: Address) -> ObjectDump {
        let hdr_addr = obj.offset(objectmodel::OBJECT_HEADER_OFFSET);
        let hdr = unsafe { hdr_addr.load::<u64>() };

        if objectmodel::header_is_fix_size(hdr) && objectmodel::header_has_ref_map(hdr) {
            // fix-sized type with an inline ref map: decode field offsets
            // straight from the bitmap (bit i set => ref at i * POINTER_SIZE)
            let ref_map = objectmodel::header_get_ref_map(hdr);

            let mut offsets = vec![];
            let mut i = 0;
            while i < objectmodel::REF_MAP_LENGTH {
                if ((ref_map >> i) & 1) == 1 {
                    offsets.push(i * POINTER_SIZE);
                }
                i += 1;
            }

            ObjectDump {
                reference_addr: obj,
                mem_start: hdr_addr,
                // header byte count is not included in the encoded object size
                mem_size: objectmodel::header_get_object_size(hdr) as usize
                    + objectmodel::OBJECT_HEADER_SIZE,
                reference_offsets: offsets
            }
        } else {
            // fix-sized type without a ref map, or a hybrid: both resolve
            // their layout through the registered GCType
            self.persist_object_by_gctype(obj, hdr_addr, hdr)
        }
    }

    /// Shared persistence path for objects whose layout is looked up by GC
    /// type id (fix-sized types without a ref map, and hybrids). Extracted to
    /// remove the duplicated branch bodies the original code had.
    fn persist_object_by_gctype(&self, obj: Address, hdr_addr: Address, hdr: u64) -> ObjectDump {
        let gctype_id = objectmodel::header_get_gctype_id(hdr);

        let gc_lock = MY_GC.read().unwrap();
        let gctype: Arc<GCType> =
            gc_lock.as_ref().unwrap().gc_types[gctype_id as usize].clone();

        ObjectDump {
            reference_addr: obj,
            mem_start: hdr_addr,
            mem_size: gctype.size,
            reference_offsets: gctype.gen_ref_offsets()
        }
    }

    /// Pushes every not-yet-dumped, non-null object referenced by `obj_dump`
    /// onto the work queue.
    fn keep_tracing(&self, obj_dump: &ObjectDump, work_queue: &mut Vec<Address>) {
        let base = obj_dump.reference_addr;

        for offset in obj_dump.reference_offsets.iter() {
            let field_addr = base.plus(*offset);
            let edge = unsafe { field_addr.load::<Address>() };

            if !edge.is_zero() && !self.objects.contains_key(&edge) {
                work_queue.push(edge);
            }
        }
    }

    /// Assigns every dumped object a unique label of the form
    /// "GCDUMP_<index>_<address>" for use as a relocatable asm symbol.
    fn label_relocatable_refs(&mut self) {
        for (count, addr) in self.objects.keys().enumerate() {
            let label = format!("GCDUMP_{}_{}", count, addr);
            self.relocatable_refs.insert(*addr, label);
        }
    }
}
use std::fmt;
impl fmt::Debug for ObjectDump {
    /// Renders the object as a one-line human-readable summary.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        let addr = &self.reference_addr;
        let start = &self.mem_start;
        let size = self.mem_size;
        let offsets = &self.reference_offsets;
        write!(
            f,
            "PersistedObject({}, {} bytes from {}, offsets at {:?})",
            addr, size, start, offsets
        )
    }
}
impl fmt::Debug for HeapDump {
    /// Multi-line listing of every dumped object followed by every
    /// relocatable label.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        // propagate formatter errors with `?` instead of panicking via
        // unwrap() — a Debug impl must return Err, not abort, on write failure
        write!(f, "Heap Dump\n")?;

        write!(f, "---{} objects---\n", self.objects.len())?;
        for obj in self.objects.iter() {
            write!(f, "{:?}\n", obj)?;
        }

        write!(f, "---{} ref labels---\n", self.relocatable_refs.len())?;
        for (addr, label) in self.relocatable_refs.iter() {
            write!(f, "{} = {}\n", addr, label)?;
        }

        Ok(())
    }
}
\ No newline at end of file
......@@ -4,6 +4,7 @@ use heap::immix::ImmixMutatorLocal;
use heap::immix::ImmixSpace;
use heap::freelist::FreeListSpace;
use objectmodel;
use common::gctype::*;
use heap::Space;
use MY_GC;
......@@ -438,6 +439,9 @@ pub fn steal_trace_object(obj: ObjectReference, local_queue: &mut Vec<ObjectRefe
panic!("error during tracing object")
}
// this part of code has some duplication with code in objectdump
// FIXME: remove the duplicate code - use 'Tracer' trait
let hdr = unsafe {addr.offset(objectmodel::OBJECT_HEADER_OFFSET).load::<u64>()};
if objectmodel::header_is_fix_size(hdr) {
......@@ -464,17 +468,41 @@ pub fn steal_trace_object(obj: ObjectReference, local_queue: &mut Vec<ObjectRefe
steal_process_edge(addr, 24,local_queue, job_sender, mark_state, immix_space, lo_space);
},
_ => {
error!("unexpected ref_bits patterns: {:b}", ref_map);
unimplemented!()
warn!("ref bits fall into slow path: {:b}", ref_map);
let mut i = 0;
while i < objectmodel::REF_MAP_LENGTH {
let has_ref : bool = ((ref_map >> i) & 1) == 1;
if has_ref {
steal_process_edge(addr, i * POINTER_SIZE, local_queue, job_sender, mark_state, immix_space, lo_space);
}
i += 1;
}
}
}
} else {
// by type ID
unimplemented!()
let gctype_id = objectmodel::header_get_gctype_id(hdr);
let gc_lock = MY_GC.read().unwrap();
let gctype : Arc<GCType> = gc_lock.as_ref().unwrap().gc_types[gctype_id as usize].clone();
for offset in gctype.gen_ref_offsets() {
steal_process_edge(addr, offset, local_queue, job_sender, mark_state, immix_space, lo_space);
}
}
} else {
// hybrids
unimplemented!()
let gctype_id = objectmodel::header_get_gctype_id(hdr);
let gc_lock = MY_GC.read().unwrap();
let gctype : Arc<GCType> = gc_lock.as_ref().unwrap().gc_types[gctype_id as usize].clone();
for offset in gctype.gen_ref_offsets() {
steal_process_edge(addr, offset, local_queue, job_sender, mark_state, immix_space, lo_space);
}
}
}
......
......@@ -21,8 +21,10 @@ use heap::immix::ImmixSpace;
use heap::immix::ImmixMutatorLocal;
use heap::freelist;
use heap::freelist::FreeListSpace;
use common::objectdump;
use utils::LinkedHashSet;
use utils::Address;
use std::fmt;
use std::sync::Arc;
......@@ -209,3 +211,9 @@ pub extern fn muentry_alloc_large(mutator: *mut ImmixMutatorLocal, size: usize,
/// Forces an immediate garbage-collection cycle by triggering the collector.
pub extern fn force_gc() {
    heap::gc::trigger_gc();
}
// dump heap
/// Runtime entry point: dumps every heap object reachable from `roots`.
// NOTE(review): `Vec<Address>` and `HeapDump` are not FFI-safe types, so this
// `extern` fn cannot be safely called from C despite `#[no_mangle]` — it only
// works for Rust callers linking against the same compiler/layout. Confirm no
// C caller is expected, or change the signature to a C-compatible one.
#[no_mangle]
pub extern fn persist_heap(roots: Vec<Address>) -> objectdump::HeapDump {
    objectdump::HeapDump::from_roots(roots)
}
\ No newline at end of file
......@@ -27,7 +27,7 @@ pub fn flip(mark: u8) -> u8 {
mark ^ 1
}
// sidemap object model
// --- sidemap object model ---
#[cfg(feature = "use-sidemap")]
pub use self::sidemap::gen_gctype_encode;
......@@ -52,7 +52,7 @@ pub use self::sidemap::is_traced;
#[cfg(feature = "use-sidemap")]
pub use self::sidemap::get_ref_byte;
// header
// --- header ----
#[cfg(not(feature = "use-sidemap"))]
pub use self::header::gen_gctype_encode;
......@@ -75,6 +75,8 @@ pub use self::header::MASK_HYBRID_LENGTH;
#[cfg(not(feature = "use-sidemap"))]
pub use self::header::MASK_REF_MAP;
#[cfg(not(feature = "use-sidemap"))]
pub use self::header::REF_MAP_LENGTH;
#[cfg(not(feature = "use-sidemap"))]
pub use self::header::SHR_HYBRID_LENGTH;
// header location/size
......@@ -101,4 +103,5 @@ pub use self::header::header_is_object_start;
pub use self::header::header_get_gctype_id;
#[cfg(not(feature = "use-sidemap"))]
pub use self::header::header_get_ref_map;
#[cfg(not(feature = "use-sidemap"))]
pub use self::header::header_get_object_size;
\ No newline at end of file
......@@ -171,6 +171,41 @@ fn create_linked_list() {
mutator.destroy();
}
#[test]
// Builds a small linked list on the GC heap, then dumps the heap starting
// from the list head and prints the resulting HeapDump. This exercises
// gc::persist_heap end-to-end; it only checks that dumping does not crash
// (the dump contents are printed, not asserted).
fn linked_list_heap_dump() {
    // set low water mark before any allocation so stack scanning is bounded
    unsafe {heap::gc::set_low_water_mark();}

    start_logging();
    gc::gc_init(IMMIX_SPACE_SIZE, LO_SPACE_SIZE, 1);
    gc::gc_stats();

    let mut mutator = gc::new_mutator();

    // inner scope: the list (and its mutator borrows) must be dropped
    // before mutator.destroy()
    {
        let mut linked_list = LinkedList::new(&mut mutator);
        const N: usize = 5;
        for i in 0..N {
            linked_list.add(i);
            println!("after add: {:?}", linked_list);
        }

        // check the list holds 0..N before dumping
        linked_list.verify((0..N).collect());

        // dump heap from head — every node should be reachable from it
        let head_addr = Address::from_mut_ptr(linked_list.head);
        let heap_dump = gc::persist_heap(vec![head_addr]);
        println!("{:?}", heap_dump);
    }

    mutator.destroy();
}
#[test]
#[ignore]
// disable this test because it will cause gcbench fail for unknown reason
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment