Commit 9cd0463b authored by qinsoon

refactored the code

parent 0d6b3fc2
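
This commit moves the per-allocator and per-space methods out of inherent impl blocks and into implementations of the crate's Allocator and Space traits, as the hunks below show. The trait definitions themselves are not part of this diff; the following is a minimal sketch inferred from those impl blocks, with stand-in types, so the exact names and signatures are assumptions rather than the repository's code:

// Inferred sketch only: signatures are taken from the impl blocks in this
// commit; the types below are stand-ins for the crate's own types.
pub type Address = usize;               // stand-in for the crate's Address
pub type ByteSize = usize;              // stand-in for the crate's ByteSize
pub struct ObjectReference(pub usize);  // stand-in for the crate's ObjectReference
pub struct Mutator;                     // stand-in for heap::Mutator

pub trait Allocator {
    fn set_mutator(&mut self, mutator: *mut Mutator);
    fn alloc(&mut self, size: ByteSize, align: ByteSize) -> Address;
    fn prepare_for_gc(&mut self);
    fn reset_after_gc(&mut self);
    fn destroy(&mut self);
}

pub trait Space {
    fn start(&self) -> Address;
    fn end(&self) -> Address;
    fn is_valid_object(&self, addr: Address) -> bool;
    fn destroy(&mut self);
    fn prepare_for_gc(&mut self);
    fn sweep(&mut self);
    fn mark_object_traced(&mut self, obj: ObjectReference);
    fn is_object_traced(&self, obj: ObjectReference) -> bool;
}
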
@@ -23,19 +23,16 @@ pub struct FreelistAllocator {
mutator: *mut Mutator
}
impl FreelistAllocator {
pub fn new(space: Raw<FreelistSpace>) -> FreelistAllocator {
FreelistAllocator {
space,
mutator: ptr::null_mut()
}
}
impl Allocator for FreelistAllocator {
fn prepare_for_gc(&mut self) {}
fn reset_after_gc(&mut self) {}
fn destroy(&mut self) {}
pub fn set_mutator(&mut self, mutator: *mut Mutator) {
fn set_mutator(&mut self, mutator: *mut Mutator) {
self.mutator = mutator;
}
pub fn alloc(&mut self, size: ByteSize, align: ByteSize) -> Address {
fn alloc(&mut self, size: ByteSize, align: ByteSize) -> Address {
loop {
unsafe { &mut *self.mutator }.yieldpoint();
@@ -48,6 +45,15 @@ impl FreelistAllocator {
}
}
}
}
impl FreelistAllocator {
pub fn new(space: Raw<FreelistSpace>) -> FreelistAllocator {
FreelistAllocator {
space,
mutator: ptr::null_mut()
}
}
pub fn init_object(&mut self, addr: Address, encode: LargeObjectEncode) {
let slot = self.space.get_type_encode_slot(addr);
......
@@ -15,7 +15,6 @@
use common::ptr::*;
use heap::*;
use objectmodel::sidemap::*;
use utils::*;
use utils::mem::memmap;
use utils::mem::memsec::memzero;
@@ -75,6 +74,99 @@ impl RawMemoryMetadata for FreelistSpace {
}
}
impl Space for FreelistSpace {
#[inline(always)]
fn start(&self) -> Address {
self.start
}
#[inline(always)]
fn end(&self) -> Address {
self.cur_end
}
#[inline(always)]
#[allow(unused_variables)]
fn is_valid_object(&self, addr: Address) -> bool {
true
}
fn destroy(&mut self) {}
fn prepare_for_gc(&mut self) {
// erase page mark
unsafe {
memzero(
&mut self.page_mark_table[0] as *mut PageMark,
self.cur_pages
);
}
}
fn sweep(&mut self) {
debug!("=== {:?} Sweep ===", self.desc);
debug_assert_eq!(self.n_used_pages() + self.n_usable_pages(), self.cur_pages);
let mut free_pages = 0;
let mut used_pages = 0;
{
let mut used_nodes = self.used_nodes.lock().unwrap();
let mut usable_nodes = self.usable_nodes.lock().unwrap();
let mut all_nodes: Vec<FreelistNode> = {
let mut ret = vec![];
ret.append(&mut used_nodes);
ret.append(&mut usable_nodes);
ret
};
debug_assert_eq!(all_nodes.len(), self.cur_pages);
while !all_nodes.is_empty() {
let node: FreelistNode = all_nodes.pop().unwrap();
let index = self.get_page_index(node.addr);
if self.page_mark_table[index] == PageMark::Live {
used_pages += node.size >> LOG_BYTES_IN_PAGE;
used_nodes.push(node);
} else {
free_pages += node.size >> LOG_BYTES_IN_PAGE;
usable_nodes.push(node);
}
}
}
if cfg!(debug_assertions) {
debug!("free pages = {} of {} total", free_pages, self.cur_pages);
debug!("used pages = {} of {} total", used_pages, self.cur_pages);
}
self.last_gc_free_pages = free_pages;
self.last_gc_used_pages = used_pages;
if self.n_used_pages() == self.total_pages && self.total_pages != 0 {
use std::process;
println!("Out of memory in Freelist Space");
process::exit(1);
}
debug_assert_eq!(self.n_used_pages() + self.n_usable_pages(), self.cur_pages);
trace!("=======================");
}
#[inline(always)]
fn mark_object_traced(&mut self, obj: ObjectReference) {
let index = self.get_page_index(obj.to_address());
self.page_mark_table[index] = PageMark::Live;
}
#[inline(always)]
fn is_object_traced(&self, obj: ObjectReference) -> bool {
let index = self.get_page_index(obj.to_address());
self.page_mark_table[index] == PageMark::Live
}
}
impl FreelistSpace {
pub fn new(desc: SpaceDescriptor, space_size: ByteSize) -> Raw<FreelistSpace> {
let mut anon_mmap = match memmap::MmapMut::map_anon(
@@ -137,23 +229,6 @@ impl FreelistSpace {
space
}
pub fn cleanup(&self) {}
#[inline(always)]
pub fn get(addr: Address) -> Raw<FreelistSpace> {
unsafe { Raw::from_addr(addr.mask(SPACE_LOWBITS_MASK)) }
}
pub fn prepare_for_gc(&mut self) {
// erase page mark
unsafe {
memzero(
&mut self.page_mark_table[0] as *mut PageMark,
self.cur_pages
);
}
}
#[inline(always)]
pub fn get_page_index(&self, obj: Address) -> usize {
(obj - self.mem_start()) >> LOG_BYTES_IN_PAGE
@@ -228,69 +303,6 @@ impl FreelistSpace {
ret
}
pub fn sweep(&mut self) {
debug!("=== {:?} Sweep ===", self.desc);
debug_assert_eq!(self.n_used_pages() + self.n_usable_pages(), self.cur_pages);
let mut free_pages = 0;
let mut used_pages = 0;
{
let mut used_nodes = self.used_nodes.lock().unwrap();
let mut usable_nodes = self.usable_nodes.lock().unwrap();
let mut all_nodes: Vec<FreelistNode> = {
let mut ret = vec![];
ret.append(&mut used_nodes);
ret.append(&mut usable_nodes);
ret
};
debug_assert_eq!(all_nodes.len(), self.cur_pages);
while !all_nodes.is_empty() {
let node: FreelistNode = all_nodes.pop().unwrap();
let index = self.get_page_index(node.addr);
if self.page_mark_table[index] == PageMark::Live {
used_pages += (node.size >> LOG_BYTES_IN_PAGE);
used_nodes.push(node);
} else {
free_pages += (node.size >> LOG_BYTES_IN_PAGE);
usable_nodes.push(node);
}
}
}
if cfg!(debug_assertions) {
debug!("free pages = {} of {} total", free_pages, self.cur_pages);
debug!("used pages = {} of {} total", used_pages, self.cur_pages);
}
self.last_gc_free_pages = free_pages;
self.last_gc_used_pages = used_pages;
if self.n_used_pages() == self.total_pages && self.total_pages != 0 {
use std::process;
println!("Out of memory in Freelist Space");
process::exit(1);
}
debug_assert_eq!(self.n_used_pages() + self.n_usable_pages(), self.cur_pages);
trace!("=======================");
}
#[inline(always)]
pub fn mark_object_traced(&mut self, obj: ObjectReference) {
let index = self.get_page_index(obj.to_address());
self.page_mark_table[index] = PageMark::Live;
}
#[inline(always)]
pub fn is_object_traced(&self, obj: ObjectReference) -> bool {
let index = self.get_page_index(obj.to_address());
self.page_mark_table[index] == PageMark::Live
}
pub fn get_type_encode(&self, obj: ObjectReference) -> LargeObjectEncode {
let index = self.get_page_index(obj.to_address());
self.page_encode_table[index]
@@ -335,22 +347,6 @@ impl FreelistSpace {
}
}
impl Space for FreelistSpace {
#[inline(always)]
fn start(&self) -> Address {
self.start
}
#[inline(always)]
fn end(&self) -> Address {
self.cur_end
}
#[inline(always)]
#[allow(unused_variables)]
fn is_valid_object(&self, addr: Address) -> bool {
true
}
}
#[repr(C)]
pub struct FreelistNode {
size: ByteSize,
@@ -359,6 +355,7 @@ pub struct FreelistNode {
#[repr(u8)]
#[derive(Copy, Clone, Debug, PartialEq)]
#[allow(dead_code)] // we do not explicitly use Free, but we zero the page marks
pub enum PageMark {
Free = 0,
Live
......
@@ -13,7 +13,6 @@
// limitations under the License.
use heap::*;
use heap::freelist::*;
use objectmodel;
use objectmodel::sidemap::*;
use MY_GC;
@@ -375,11 +374,14 @@ pub fn steal_trace_object(
) {
match SpaceDescriptor::get(obj) {
SpaceDescriptor::ImmixTiny => {
let mut space = ImmixSpace::get::<ImmixSpace>(obj.to_address());
// mark current object traced
immix::mark_object_traced(obj);
space.mark_object_traced(obj);
let encode = unsafe {
ImmixSpace::get_type_byte_slot_static(obj.to_address()).load::<TinyObjectEncode>()
space
.get_type_byte_slot(space.get_word_index(obj.to_address()))
.load::<TinyObjectEncode>()
};
trace_if!(TRACE_GC, " trace tiny obj: {} ({:?})", obj, encode);
@@ -394,12 +396,13 @@ pub fn steal_trace_object(
}
}
SpaceDescriptor::ImmixNormal => {
let mut space = ImmixSpace::get::<ImmixSpace>(obj.to_address());
//mark current object traced
immix::mark_object_traced(obj);
space.mark_object_traced(obj);
// get type encode
let (type_encode, type_size): (&TypeEncode, ByteOffset) = {
let type_slot = ImmixSpace::get_type_byte_slot_static(obj.to_address());
let type_slot = space.get_type_byte_slot(space.get_word_index(obj.to_address()));
let encode = unsafe { type_slot.load::<MediumObjectEncode>() };
let small_encode: &SmallObjectEncode = unsafe { transmute(&encode) };
@@ -444,7 +447,7 @@ pub fn steal_trace_object(
trace_if!(TRACE_GC, " -done-");
}
SpaceDescriptor::Freelist => {
let mut space = FreelistSpace::get(obj.to_address());
let mut space = FreelistSpace::get::<FreelistSpace>(obj.to_address());
space.mark_object_traced(obj);
let encode = space.get_type_encode(obj);
@@ -498,12 +501,13 @@ fn trace_word(
match SpaceDescriptor::get(edge) {
SpaceDescriptor::ImmixTiny | SpaceDescriptor::ImmixNormal => {
if !immix::is_object_traced(edge) {
let space = ImmixSpace::get::<ImmixSpace>(edge.to_address());
if !space.is_object_traced(edge) {
steal_process_edge(edge, local_queue, job_sender);
}
}
SpaceDescriptor::Freelist => {
let space = FreelistSpace::get(edge.to_address());
let space = FreelistSpace::get::<FreelistSpace>(edge.to_address());
if !space.is_object_traced(edge) {
debug!("edge {} is not traced, trace it", edge);
steal_process_edge(edge, local_queue, job_sender);
......
@@ -12,8 +12,7 @@
// See the License for the specific language governing permissions and
// limitations under the License.
use heap::Mutator;
use heap::immix::*;
use heap::*;
use heap::immix::ImmixSpace;
use heap::immix::immix_space::ImmixBlock;
use objectmodel;
@@ -46,49 +45,27 @@ lazy_static! {
pub static ref LIMIT_OFFSET : usize = offset_of!(ImmixAllocator=>limit).get_byte_offset();
}
impl ImmixAllocator {
pub fn reset(&mut self) -> () {
unsafe {
// should not use Address::zero() other than initialization
self.cursor = Address::zero();
self.limit = Address::zero();
self.large_cursor = Address::zero();
self.large_limit = Address::zero();
}
self.line = LINES_IN_BLOCK;
self.block = None;
self.large_block = None;
}
pub fn reset_after_gc(&mut self) {
impl Allocator for ImmixAllocator {
fn reset_after_gc(&mut self) {
self.reset();
}
pub fn new(space: Raw<ImmixSpace>) -> ImmixAllocator {
ImmixAllocator {
cursor: unsafe { Address::zero() },
limit: unsafe { Address::zero() },
line: LINES_IN_BLOCK,
block: None,
large_cursor: unsafe { Address::zero() },
large_limit: unsafe { Address::zero() },
large_block: None,
space,
mutator: ptr::null_mut()
}
fn prepare_for_gc(&mut self) {
self.return_block(true);
self.return_block(false);
}
pub fn set_mutator(&mut self, mutator: *mut Mutator) {
fn set_mutator(&mut self, mutator: *mut Mutator) {
self.mutator = mutator;
}
pub fn destroy(&mut self) {
fn destroy(&mut self) {
self.return_block(true);
self.return_block(false);
}
#[inline(always)]
pub fn alloc(&mut self, size: usize, align: usize) -> Address {
fn alloc(&mut self, size: usize, align: usize) -> Address {
// this part of code will slow down allocation
let align = objectmodel::check_alignment(align);
// end
@@ -117,6 +94,35 @@ impl ImmixAllocator {
start
}
}
}
impl ImmixAllocator {
fn reset(&mut self) -> () {
unsafe {
// should not use Address::zero() other than initialization
self.cursor = Address::zero();
self.limit = Address::zero();
self.large_cursor = Address::zero();
self.large_limit = Address::zero();
}
self.line = LINES_IN_BLOCK;
self.block = None;
self.large_block = None;
}
pub fn new(space: Raw<ImmixSpace>) -> ImmixAllocator {
ImmixAllocator {
cursor: unsafe { Address::zero() },
limit: unsafe { Address::zero() },
line: LINES_IN_BLOCK,
block: None,
large_cursor: unsafe { Address::zero() },
large_limit: unsafe { Address::zero() },
large_block: None,
space,
mutator: ptr::null_mut()
}
}
#[inline(never)]
pub fn alloc_slow(&mut self, size: usize, align: usize) -> Address {
@@ -283,10 +289,7 @@ impl ImmixAllocator {
}
}
pub fn prepare_for_gc(&mut self) {
self.return_block(true);
self.return_block(false);
}
fn return_block(&mut self, request_large: bool) {
if request_large {
......
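
With FreelistAllocator and ImmixAllocator both implementing the Allocator trait (and the spaces implementing Space), GC entry points can treat every allocator uniformly instead of calling type-specific helpers. A hypothetical illustration of that uniformity, written against the trait sketched near the top of this page and not taken from this commit:

// Hypothetical helpers, not from this commit: per-GC bookkeeping no longer
// needs to know which concrete allocator type it holds.
fn prepare_all_for_gc(allocators: &mut [Box<dyn Allocator>]) {
    // Each allocator hands back or resets its private state before tracing
    // starts, whether it is an Immix allocator or a freelist allocator.
    for a in allocators.iter_mut() {
        a.prepare_for_gc();
    }
}

fn reset_all_after_gc(allocators: &mut [Box<dyn Allocator>]) {
    for a in allocators.iter_mut() {
        a.reset_after_gc();
    }
}
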
@@ -107,6 +107,183 @@ impl RawMemoryMetadata for ImmixSpace {
}
}
impl Space for ImmixSpace {
#[inline(always)]
fn start(&self) -> Address {
self.start
}
#[inline(always)]
fn end(&self) -> Address {
self.cur_end
}
#[inline(always)]
#[allow(unused_variables)]
fn is_valid_object(&self, addr: Address) -> bool {
// we cannot judge if it is a valid object, we always return true
true
}
fn destroy(&mut self) {}
fn prepare_for_gc(&mut self) {
// erase lines marks
let lines = self.cur_blocks << LOG_LINES_IN_BLOCK;
unsafe {
memsec::memzero(&mut self.line_mark_table[0] as *mut LineMark, lines);
}
// erase gc bytes
let words = self.cur_size >> LOG_POINTER_SIZE;
for i in 0..words {
self.gc_byte_table[i] = bit_utils::clear_bit_u8(self.gc_byte_table[i], GC_MARK_BIT);
}
}
#[allow(unused_variables)]
#[allow(unused_assignments)]
fn sweep(&mut self) {
debug!("=== {:?} Sweep ===", self.desc);
debug_assert_eq!(
self.n_used_blocks() + self.n_usable_blocks(),
self.cur_blocks
);
// some statistics
let mut free_lines = 0;
let mut used_lines = 0;
{
let mut used_blocks_lock = self.used_blocks.lock().unwrap();
let mut usable_blocks_lock = self.usable_blocks.lock().unwrap();
let mut all_blocks: LinkedList<Raw<ImmixBlock>> = {
let mut ret = LinkedList::new();
ret.append(&mut used_blocks_lock);
ret.append(&mut usable_blocks_lock);
ret
};
debug_assert_eq!(all_blocks.len(), self.cur_blocks);
while !all_blocks.is_empty() {
let block = all_blocks.pop_front().unwrap();
let line_index = self.get_line_mark_index(block.mem_start());
let block_index = self.get_block_mark_index(block.mem_start());
let mut has_free_lines = false;
// find free lines in the block, and set their line mark as free
// (not zeroing the memory yet)
for i in line_index..(line_index + LINES_IN_BLOCK) {
if self.line_mark_table[i] != LineMark::Live &&
self.line_mark_table[i] != LineMark::ConservLive
{
has_free_lines = true;
self.line_mark_table[i] = LineMark::Free;
free_lines += 1;
} else {
used_lines += 1;
}
}
if has_free_lines {
trace!("Block {} is usable", block.addr());
self.block_mark_table[block_index] = BlockMark::Usable;
usable_blocks_lock.push_front(block);
} else {
trace!("Block {} is full", block.addr());
self.block_mark_table[block_index] = BlockMark::Full;
used_blocks_lock.push_front(block);
}
}
}
if cfg!(debug_assertions) {
debug!(
"free lines = {} of {} total ({} blocks)",
free_lines,
self.cur_blocks * LINES_IN_BLOCK,
self.cur_blocks
);
debug!(
"used lines = {} of {} total ({} blocks)",
used_lines,
self.cur_blocks * LINES_IN_BLOCK,
self.cur_blocks
);
debug!("usable blocks = {}", self.n_usable_blocks());
debug!("full blocks = {}", self.n_used_blocks());
}
self.last_gc_free_lines = free_lines;
self.last_gc_used_lines = used_lines;
if self.n_used_blocks() == self.total_blocks && self.total_blocks != 0 {
println!("Out of memory in Immix Space");
process::exit(1);
}
debug_assert_eq!(
self.n_used_blocks() + self.n_usable_blocks(),
self.cur_blocks
);
trace!("=======================");
}
#[inline(always)]
fn mark_object_traced(&mut self, obj: ObjectReference) {
let obj_addr = obj.to_address();
// mark object
let obj_index = self.get_word_index(obj_addr);
let slot = self.get_gc_byte_slot(obj_index);
let gc_byte = unsafe { slot.load::<u8>() };
unsafe {
slot.store(gc_byte | GC_MARK_BIT);
}
if is_straddle_object(gc_byte) {
// we need to know object size, and mark multiple lines
let size = {
use std::mem::transmute;
let type_slot = self.get_type_byte_slot(obj_index);
let med_encode = unsafe { type_slot.load::<MediumObjectEncode>() };
let small_encode: &SmallObjectEncode = unsafe { transmute(&med_encode) };
if small_encode.is_small() {
small_encode.size()