WARNING! Access to this system is limited to authorised users only.
Unauthorised users may be subject to prosecution.
Unauthorised access to this system is a criminal offence under Australian law (Federal Crimes Act 1914 Part VIA)
It is a criminal offence to:
(1) Obtain access to data without authority. -Penalty 2 years imprisonment.
(2) Damage, delete, alter or insert data without authority. -Penalty 10 years imprisonment.
User activity is monitored and recorded. Anyone using this system expressly consents to such monitoring and recording.

To protect your data, the CISO has advised all users to enable 2FA as soon as possible.
Currently, 2.7% of users have enabled 2FA.

Commit b6a1ffc3 authored by qinsoon's avatar qinsoon
Browse files

refactoring Address, supporting add/sub ops

parent 27df3a88
...@@ -3649,13 +3649,13 @@ pub fn emit_context_with_reloc(vm: &VM, ...@@ -3649,13 +3649,13 @@ pub fn emit_context_with_reloc(vm: &VM,
// get ready to go through from the object start (not mem_start) to the end // get ready to go through from the object start (not mem_start) to the end
let base = obj_dump.reference_addr; let base = obj_dump.reference_addr;
let end = obj_dump.mem_start.plus(obj_dump.mem_size); let end = obj_dump.mem_start + obj_dump.mem_size;
assert!(base.is_aligned_to(POINTER_SIZE)); assert!(base.is_aligned_to(POINTER_SIZE));
// offset as cursor // offset as cursor
let mut offset = 0; let mut offset = 0;
while offset < obj_dump.mem_size { while offset < obj_dump.mem_size {
let cur_addr = base.plus(offset); let cur_addr = base + offset;
if obj_dump.reference_offsets.contains(&offset) { if obj_dump.reference_offsets.contains(&offset) {
// if this offset is a reference field, we put a relocatable label generated by the GC // if this offset is a reference field, we put a relocatable label generated by the GC
...@@ -3683,7 +3683,7 @@ pub fn emit_context_with_reloc(vm: &VM, ...@@ -3683,7 +3683,7 @@ pub fn emit_context_with_reloc(vm: &VM,
// otherwise this offset is plain data // otherwise this offset is plain data
// write plain word (as bytes) // write plain word (as bytes)
let next_word_addr = cur_addr.plus(POINTER_SIZE); let next_word_addr = cur_addr + POINTER_SIZE;
if next_word_addr <= end { if next_word_addr <= end {
write_data_bytes(&mut file, cur_addr, next_word_addr); write_data_bytes(&mut file, cur_addr, next_word_addr);
} else { } else {
...@@ -3731,7 +3731,7 @@ fn write_data_bytes(f: &mut File, from: Address, to: Address) { ...@@ -3731,7 +3731,7 @@ fn write_data_bytes(f: &mut File, from: Address, to: Address) {
let byte = unsafe {cursor.load::<u8>()}; let byte = unsafe {cursor.load::<u8>()};
f.write_fmt(format_args!("0x{:x}", byte)).unwrap(); f.write_fmt(format_args!("0x{:x}", byte)).unwrap();
cursor = cursor.plus(1); cursor = cursor + 1 as ByteSize;
if cursor != to { if cursor != to {
f.write(",".as_bytes()).unwrap(); f.write(",".as_bytes()).unwrap();
} }
......
...@@ -35,8 +35,9 @@ pub use compiler::backend::x86_64::asm_backend::emit_context; ...@@ -35,8 +35,9 @@ pub use compiler::backend::x86_64::asm_backend::emit_context;
pub use compiler::backend::x86_64::asm_backend::emit_context_with_reloc; pub use compiler::backend::x86_64::asm_backend::emit_context_with_reloc;
#[cfg(feature = "aot")] #[cfg(feature = "aot")]
pub use compiler::backend::x86_64::asm_backend::spill_rewrite; pub use compiler::backend::x86_64::asm_backend::spill_rewrite;
use utils::Address;
use utils::Address;
use utils::ByteSize;
use ast::ptr::P; use ast::ptr::P;
use ast::ir::*; use ast::ir::*;
use ast::types::*; use ast::types::*;
...@@ -526,13 +527,13 @@ pub fn get_previous_frame_pointer(frame_pointer: Address) -> Address { ...@@ -526,13 +527,13 @@ pub fn get_previous_frame_pointer(frame_pointer: Address) -> Address {
/// gets the return address for the current frame pointer /// gets the return address for the current frame pointer
#[inline(always)] #[inline(always)]
pub fn get_return_address(frame_pointer: Address) -> Address { pub fn get_return_address(frame_pointer: Address) -> Address {
unsafe { frame_pointer.plus(8).load::<Address>() } unsafe { (frame_pointer + 8 as ByteSize).load::<Address>() }
} }
/// gets the stack pointer before the current frame was created /// gets the stack pointer before the current frame was created
#[inline(always)] #[inline(always)]
pub fn get_previous_stack_pointer(frame_pointer: Address) -> Address { pub fn get_previous_stack_pointer(frame_pointer: Address) -> Address {
frame_pointer.plus(16) frame_pointer + 16 as ByteSize
} }
/// sets the stack point /// sets the stack point
...@@ -544,7 +545,7 @@ pub fn set_previous_frame_pointer(frame_pointer: Address, value: Address) { ...@@ -544,7 +545,7 @@ pub fn set_previous_frame_pointer(frame_pointer: Address, value: Address) {
/// gets the return address for the current frame pointer /// gets the return address for the current frame pointer
#[inline(always)] #[inline(always)]
pub fn set_return_address(frame_pointer: Address, value: Address) { pub fn set_return_address(frame_pointer: Address, value: Address) {
unsafe { frame_pointer.plus(8).store::<Address>(value) } unsafe { (frame_pointer + 8 as ByteSize).store::<Address>(value) }
} }
/// returns offset of callee saved register /// returns offset of callee saved register
......
...@@ -26,7 +26,7 @@ pub struct AddressBitmap { ...@@ -26,7 +26,7 @@ pub struct AddressBitmap {
impl AddressBitmap { impl AddressBitmap {
pub fn new(start: Address, end: Address) -> AddressBitmap { pub fn new(start: Address, end: Address) -> AddressBitmap {
let bitmap_len = end.diff(start) >> LOG_POINTER_SIZE; let bitmap_len = (end - start) >> LOG_POINTER_SIZE;
let bitmap = Bitmap::new(bitmap_len); let bitmap = Bitmap::new(bitmap_len);
AddressBitmap{start: start, end: end, bitmap: bitmap} AddressBitmap{start: start, end: end, bitmap: bitmap}
...@@ -37,7 +37,7 @@ impl AddressBitmap { ...@@ -37,7 +37,7 @@ impl AddressBitmap {
pub unsafe fn set_bit(&self, addr: Address) { pub unsafe fn set_bit(&self, addr: Address) {
use std::mem; use std::mem;
let mutable_bitmap : &mut Bitmap = mem::transmute(&self.bitmap); let mutable_bitmap : &mut Bitmap = mem::transmute(&self.bitmap);
mutable_bitmap.set_bit(addr.diff(self.start) >> LOG_POINTER_SIZE); mutable_bitmap.set_bit((addr - self.start) >> LOG_POINTER_SIZE);
} }
#[inline(always)] #[inline(always)]
...@@ -45,17 +45,17 @@ impl AddressBitmap { ...@@ -45,17 +45,17 @@ impl AddressBitmap {
pub unsafe fn clear_bit(&self, addr: Address) { pub unsafe fn clear_bit(&self, addr: Address) {
use std::mem; use std::mem;
let mutable_bitmap : &mut Bitmap = mem::transmute(&self.bitmap); let mutable_bitmap : &mut Bitmap = mem::transmute(&self.bitmap);
mutable_bitmap.clear_bit(addr.diff(self.start) >> LOG_POINTER_SIZE); mutable_bitmap.clear_bit((addr - self.start) >> LOG_POINTER_SIZE);
} }
#[inline(always)] #[inline(always)]
pub fn test_bit(&self, addr: Address) -> bool { pub fn test_bit(&self, addr: Address) -> bool {
self.bitmap.test_bit(addr.diff(self.start) >> LOG_POINTER_SIZE) self.bitmap.test_bit((addr - self.start) >> LOG_POINTER_SIZE)
} }
#[inline(always)] #[inline(always)]
pub fn length_until_next_bit(&self, addr: Address) -> usize { pub fn length_until_next_bit(&self, addr: Address) -> usize {
self.bitmap.length_until_next_bit(addr.diff(self.start) >> LOG_POINTER_SIZE) self.bitmap.length_until_next_bit((addr - self.start) >> LOG_POINTER_SIZE)
} }
#[inline(always)] #[inline(always)]
...@@ -67,7 +67,7 @@ impl AddressBitmap { ...@@ -67,7 +67,7 @@ impl AddressBitmap {
assert!(addr >= self.start && addr <= self.end); assert!(addr >= self.start && addr <= self.end);
} }
let index = addr.diff(self.start) >> LOG_POINTER_SIZE; let index = (addr - self.start) >> LOG_POINTER_SIZE;
let mutable_bitmap : &mut Bitmap = mem::transmute(&self.bitmap); let mutable_bitmap : &mut Bitmap = mem::transmute(&self.bitmap);
mutable_bitmap.set(index, value, length); mutable_bitmap.set(index, value, length);
} }
...@@ -78,7 +78,7 @@ impl AddressBitmap { ...@@ -78,7 +78,7 @@ impl AddressBitmap {
assert!(addr >= self.start && addr <= self.end); assert!(addr >= self.start && addr <= self.end);
} }
let index = addr.diff(self.start) >> LOG_POINTER_SIZE; let index = (addr - self.start) >> LOG_POINTER_SIZE;
self.bitmap.get(index, length) self.bitmap.get(index, length)
} }
......
...@@ -29,7 +29,7 @@ pub struct AddressMap<T: Copy> { ...@@ -29,7 +29,7 @@ pub struct AddressMap<T: Copy> {
impl <T> AddressMap<T> where T: Copy{ impl <T> AddressMap<T> where T: Copy{
pub fn new(start: Address, end: Address) -> AddressMap<T> { pub fn new(start: Address, end: Address) -> AddressMap<T> {
let len = end.diff(start) >> LOG_POINTER_SIZE; let len = (end - start) >> LOG_POINTER_SIZE;
let ptr = unsafe{malloc_zero(mem::size_of::<T>() * len)} as *mut T; let ptr = unsafe{malloc_zero(mem::size_of::<T>() * len)} as *mut T;
AddressMap{start: start, end: end, ptr: ptr, len: len} AddressMap{start: start, end: end, ptr: ptr, len: len}
...@@ -40,19 +40,19 @@ impl <T> AddressMap<T> where T: Copy{ ...@@ -40,19 +40,19 @@ impl <T> AddressMap<T> where T: Copy{
while cursor < self.end { while cursor < self.end {
self.set(cursor, init); self.set(cursor, init);
cursor = cursor.plus(POINTER_SIZE); cursor = cursor + POINTER_SIZE;
} }
} }
#[inline(always)] #[inline(always)]
pub fn set(&self, addr: Address, value: T) { pub fn set(&self, addr: Address, value: T) {
let index = (addr.diff(self.start) >> LOG_POINTER_SIZE) as isize; let index = ((addr - self.start) >> LOG_POINTER_SIZE) as isize;
unsafe{*self.ptr.offset(index) = value}; unsafe{*self.ptr.offset(index) = value};
} }
#[inline(always)] #[inline(always)]
pub fn get(&self, addr: Address) -> T { pub fn get(&self, addr: Address) -> T {
let index = (addr.diff(self.start) >> LOG_POINTER_SIZE) as isize; let index = ((addr - self.start) >> LOG_POINTER_SIZE) as isize;
unsafe {*self.ptr.offset(index)} unsafe {*self.ptr.offset(index)}
} }
} }
...@@ -64,7 +64,7 @@ impl HeapDump { ...@@ -64,7 +64,7 @@ impl HeapDump {
fn persist_object(&self, obj: Address) -> ObjectDump { fn persist_object(&self, obj: Address) -> ObjectDump {
trace!("dump object: {}", obj); trace!("dump object: {}", obj);
let hdr_addr = obj.offset(objectmodel::OBJECT_HEADER_OFFSET); let hdr_addr = obj + objectmodel::OBJECT_HEADER_OFFSET;
let hdr = unsafe {hdr_addr.load::<u64>()}; let hdr = unsafe {hdr_addr.load::<u64>()};
if objectmodel::header_is_fix_size(hdr) { if objectmodel::header_is_fix_size(hdr) {
...@@ -132,7 +132,7 @@ impl HeapDump { ...@@ -132,7 +132,7 @@ impl HeapDump {
let base = obj_dump.reference_addr; let base = obj_dump.reference_addr;
for offset in obj_dump.reference_offsets.iter() { for offset in obj_dump.reference_offsets.iter() {
let field_addr = base.plus(*offset); let field_addr = base + *offset;
let edge = unsafe {field_addr.load::<Address>()}; let edge = unsafe {field_addr.load::<Address>()};
if !edge.is_zero() && !self.objects.contains_key(&edge) { if !edge.is_zero() && !self.objects.contains_key(&edge) {
......
...@@ -50,7 +50,7 @@ impl FreeListSpace { ...@@ -50,7 +50,7 @@ impl FreeListSpace {
Err(_) => panic!("failed to call mmap") Err(_) => panic!("failed to call mmap")
}; };
let start : Address = Address::from_ptr::<u8>(anon_mmap.ptr()).align_up(SPACE_ALIGN); let start : Address = Address::from_ptr::<u8>(anon_mmap.ptr()).align_up(SPACE_ALIGN);
let end : Address = start.plus(space_size); let end : Address = start + space_size;
let trace_map = AddressMap::new(start, end); let trace_map = AddressMap::new(start, end);
let alloc_map = AddressMap::new(start, end); let alloc_map = AddressMap::new(start, end);
...@@ -102,7 +102,7 @@ impl FreeListSpace { ...@@ -102,7 +102,7 @@ impl FreeListSpace {
if res.is_zero() { if res.is_zero() {
res res
} else { } else {
res.offset(-objectmodel::OBJECT_HEADER_OFFSET) res + (-objectmodel::OBJECT_HEADER_OFFSET)
} }
} }
...@@ -259,7 +259,7 @@ struct Treadmill{ ...@@ -259,7 +259,7 @@ struct Treadmill{
impl Treadmill { impl Treadmill {
fn new(start: Address, end: Address) -> Treadmill { fn new(start: Address, end: Address) -> Treadmill {
let half_space = start.plus(end.diff(start) / 2); let half_space = start + ((end - start) / 2);
let mut from_space = vec![]; let mut from_space = vec![];
let mut to_space = vec![]; let mut to_space = vec![];
...@@ -268,12 +268,12 @@ impl Treadmill { ...@@ -268,12 +268,12 @@ impl Treadmill {
while addr < half_space { while addr < half_space {
from_space.push(TreadmillNode::new(addr)); from_space.push(TreadmillNode::new(addr));
addr = addr.plus(BLOCK_SIZE); addr = addr + BLOCK_SIZE;
} }
while addr < end { while addr < end {
to_space.push(TreadmillNode::new(addr)); to_space.push(TreadmillNode::new(addr));
addr = addr.plus(BLOCK_SIZE); addr = addr + BLOCK_SIZE;
} }
Treadmill { Treadmill {
...@@ -392,7 +392,7 @@ impl Treadmill { ...@@ -392,7 +392,7 @@ impl Treadmill {
// we need to check if 7&8, 8&9 (cursor is 7, and 8) // we need to check if 7&8, 8&9 (cursor is 7, and 8)
let mut cursor = start; let mut cursor = start;
while cursor < start + n_blocks - 1 { while cursor < start + n_blocks - 1 {
if from_space[cursor].payload.plus(BLOCK_SIZE) != from_space[cursor + 1].payload { if from_space[cursor].payload + BLOCK_SIZE != from_space[cursor + 1].payload {
return false; return false;
} }
......
...@@ -117,7 +117,7 @@ pub fn stack_scan() -> Vec<ObjectReference> { ...@@ -117,7 +117,7 @@ pub fn stack_scan() -> Vec<ObjectReference> {
ret.push(unsafe {value.to_object_reference()}); ret.push(unsafe {value.to_object_reference()});
} }
cursor = cursor.plus(POINTER_SIZE); cursor = cursor + POINTER_SIZE;
} }
let roots_from_stack = ret.len(); let roots_from_stack = ret.len();
...@@ -473,7 +473,7 @@ pub fn steal_trace_object(obj: ObjectReference, local_queue: &mut Vec<ObjectRefe ...@@ -473,7 +473,7 @@ pub fn steal_trace_object(obj: ObjectReference, local_queue: &mut Vec<ObjectRefe
// this part of code has some duplication with code in objectdump // this part of code has some duplication with code in objectdump
// FIXME: remove the duplicate code - use 'Tracer' trait // FIXME: remove the duplicate code - use 'Tracer' trait
let hdr = unsafe {addr.offset(objectmodel::OBJECT_HEADER_OFFSET).load::<u64>()}; let hdr = unsafe {(addr + objectmodel::OBJECT_HEADER_OFFSET).load::<u64>()};
if objectmodel::header_is_fix_size(hdr) { if objectmodel::header_is_fix_size(hdr) {
// fix sized type // fix sized type
...@@ -589,7 +589,7 @@ pub fn steal_process_edge(base: Address, offset: usize, local_queue:&mut Vec<Obj ...@@ -589,7 +589,7 @@ pub fn steal_process_edge(base: Address, offset: usize, local_queue:&mut Vec<Obj
#[inline(always)] #[inline(always)]
#[cfg(not(feature = "use-sidemap"))] #[cfg(not(feature = "use-sidemap"))]
pub fn steal_process_edge(base: Address, offset: usize, local_queue:&mut Vec<ObjectReference>, job_sender: &mpsc::Sender<ObjectReference>, mark_state: u8, immix_space: &ImmixSpace, lo_space: &FreeListSpace) { pub fn steal_process_edge(base: Address, offset: usize, local_queue:&mut Vec<ObjectReference>, job_sender: &mpsc::Sender<ObjectReference>, mark_state: u8, immix_space: &ImmixSpace, lo_space: &FreeListSpace) {
let field_addr = base.plus(offset); let field_addr = base + offset;
let edge = unsafe {field_addr.load::<ObjectReference>()}; let edge = unsafe {field_addr.load::<ObjectReference>()};
if cfg!(debug_assertions) { if cfg!(debug_assertions) {
......
...@@ -18,6 +18,7 @@ use heap::immix::immix_space::ImmixBlock; ...@@ -18,6 +18,7 @@ use heap::immix::immix_space::ImmixBlock;
use heap::gc; use heap::gc;
use objectmodel; use objectmodel;
use utils::Address; use utils::Address;
use utils::ByteSize;
use std::*; use std::*;
use std::sync::Arc; use std::sync::Arc;
...@@ -161,7 +162,7 @@ impl ImmixMutatorLocal { ...@@ -161,7 +162,7 @@ impl ImmixMutatorLocal {
} }
let start = self.cursor.align_up(align); let start = self.cursor.align_up(align);
let end = start.plus(size); let end = start + size;
if TRACE_ALLOC_FASTPATH { if TRACE_ALLOC_FASTPATH {
trace!("Mutator{}: fastpath alloc: start=0x{:x}, end=0x{:x}", self.id, start, end); trace!("Mutator{}: fastpath alloc: start=0x{:x}, end=0x{:x}", self.id, start, end);
...@@ -182,7 +183,7 @@ impl ImmixMutatorLocal { ...@@ -182,7 +183,7 @@ impl ImmixMutatorLocal {
} }
// this offset should be removed as well (for performance) // this offset should be removed as well (for performance)
ret.offset(-objectmodel::OBJECT_HEADER_OFFSET) ret + (-objectmodel::OBJECT_HEADER_OFFSET)
} else { } else {
if cfg!(debug_assertions) { if cfg!(debug_assertions) {
if !start.is_aligned_to(align) { if !start.is_aligned_to(align) {
...@@ -193,7 +194,7 @@ impl ImmixMutatorLocal { ...@@ -193,7 +194,7 @@ impl ImmixMutatorLocal {
} }
self.cursor = end; self.cursor = end;
start.offset(-objectmodel::OBJECT_HEADER_OFFSET) start + (-objectmodel::OBJECT_HEADER_OFFSET)
} }
} }
...@@ -211,7 +212,7 @@ impl ImmixMutatorLocal { ...@@ -211,7 +212,7 @@ impl ImmixMutatorLocal {
/// Writes the encoded header word for a newly allocated fixed-size
/// object at `addr + OBJECT_HEADER_OFFSET`.
/// NOTE(review): whether the header precedes or follows `addr` depends
/// on the sign of `OBJECT_HEADER_OFFSET` — confirm against objectmodel.
#[cfg(not(feature = "use-sidemap"))]
pub fn init_object(&mut self, addr: Address, encode: u64) {
    let header_addr = addr + objectmodel::OBJECT_HEADER_OFFSET;
    unsafe {
        header_addr.store(encode);
    }
}
...@@ -225,7 +226,7 @@ impl ImmixMutatorLocal { ...@@ -225,7 +226,7 @@ impl ImmixMutatorLocal {
/// Writes the header word for a hybrid (variable-length) object,
/// folding the element count `len` into the header's length bit field
/// before storing it at `addr + OBJECT_HEADER_OFFSET`.
pub fn init_hybrid(&mut self, addr: Address, encode: u64, len: u64) {
    // Pack the hybrid length into its reserved bits, then merge.
    let length_bits = (len << objectmodel::SHR_HYBRID_LENGTH) & objectmodel::MASK_HYBRID_LENGTH;
    let encode = encode | length_bits;
    unsafe {
        (addr + objectmodel::OBJECT_HEADER_OFFSET).store(encode);
    }
}
...@@ -242,11 +243,11 @@ impl ImmixMutatorLocal { ...@@ -242,11 +243,11 @@ impl ImmixMutatorLocal {
// we can alloc from local blocks // we can alloc from local blocks
let end_line = self.block().get_next_unavailable_line(next_available_line); let end_line = self.block().get_next_unavailable_line(next_available_line);
self.cursor = self.block().start().plus(next_available_line << immix::LOG_BYTES_IN_LINE); self.cursor = self.block().start() + (next_available_line << immix::LOG_BYTES_IN_LINE);
self.limit = self.block().start().plus(end_line << immix::LOG_BYTES_IN_LINE); self.limit = self.block().start() + (end_line << immix::LOG_BYTES_IN_LINE);
self.line = end_line; self.line = end_line;
self.cursor.memset(0, self.limit.diff(self.cursor)); unsafe {self.cursor.memset(0, self.limit - self.cursor);}
for line in next_available_line..end_line { for line in next_available_line..end_line {
self.block().line_mark_table_mut().set(line, immix::LineMark::FreshAlloc); self.block().line_mark_table_mut().set(line, immix::LineMark::FreshAlloc);
...@@ -254,7 +255,7 @@ impl ImmixMutatorLocal { ...@@ -254,7 +255,7 @@ impl ImmixMutatorLocal {
// allocate fast path // allocate fast path
let start = self.cursor.align_up(align); let start = self.cursor.align_up(align);
let end = start.plus(size); let end = start + size;
self.cursor = end; self.cursor = end;
start start
...@@ -344,9 +345,9 @@ impl ImmixMutatorLocal { ...@@ -344,9 +345,9 @@ impl ImmixMutatorLocal {
pub fn print_object_static(obj: Address, length: usize) { pub fn print_object_static(obj: Address, length: usize) {
debug!("===Object {:#X} size: {} bytes===", obj, length); debug!("===Object {:#X} size: {} bytes===", obj, length);
let mut cur_addr = obj; let mut cur_addr = obj;
while cur_addr < obj.plus(length) { while cur_addr < obj + length {
debug!("Address: {:#X} {:#X}", cur_addr, unsafe {cur_addr.load::<u64>()}); debug!("Address: {:#X} {:#X}", cur_addr, unsafe {cur_addr.load::<u64>()});
cur_addr = cur_addr.plus(8); cur_addr = cur_addr + 8 as ByteSize;
} }
debug!("----"); debug!("----");
debug!("========="); debug!("=========");
......
...@@ -45,7 +45,7 @@ pub struct LineMarkTableSlice { ...@@ -45,7 +45,7 @@ pub struct LineMarkTableSlice {
impl LineMarkTable { impl LineMarkTable {
pub fn new(space_start: Address, space_end: Address) -> LineMarkTable { pub fn new(space_start: Address, space_end: Address) -> LineMarkTable {
let line_mark_table_len = space_end.diff(space_start) / immix::BYTES_IN_LINE; let line_mark_table_len = (space_end - space_start) / immix::BYTES_IN_LINE;
let line_mark_table = { let line_mark_table = {
let ret = unsafe {malloc_zero(mem::size_of::<immix::LineMark>() * line_mark_table_len)} as *mut immix::LineMark; let ret = unsafe {malloc_zero(mem::size_of::<immix::LineMark>() * line_mark_table_len)} as *mut immix::LineMark;
let mut cursor = ret; let mut cursor = ret;
...@@ -79,12 +79,12 @@ impl LineMarkTable { ...@@ -79,12 +79,12 @@ impl LineMarkTable {
} }
pub fn index_to_address(&self, index: usize) -> Address { pub fn index_to_address(&self, index: usize) -> Address {
self.space_start.plus(index << immix::LOG_BYTES_IN_LINE) self.space_start + (index << immix::LOG_BYTES_IN_LINE)
} }
#[inline(always)] #[inline(always)]
pub fn mark_line_live(&self, addr: Address) { pub fn mark_line_live(&self, addr: Address) {
let line_table_index = addr.diff(self.space_start) >> immix::LOG_BYTES_IN_LINE; let line_table_index = (addr - self.space_start) >> immix::LOG_BYTES_IN_LINE;
self.set(line_table_index, immix::LineMark::Live); self.set(line_table_index, immix::LineMark::Live);
...@@ -95,7 +95,7 @@ impl LineMarkTable { ...@@ -95,7 +95,7 @@ impl LineMarkTable {
#[inline(always)] #[inline(always)]
pub fn mark_line_live2(&self, space_start: Address, addr: Address) { pub fn mark_line_live2(&self, space_start: Address, addr: Address) {
let line_table_index = addr.diff(space_start) >> immix::LOG_BYTES_IN_LINE; let line_table_index = (addr - space_start) >> immix::LOG_BYTES_IN_LINE;
self.set(line_table_index, immix::LineMark::Live); self.set(line_table_index, immix::LineMark::Live);
...@@ -165,7 +165,7 @@ impl ImmixSpace { ...@@ -165,7 +165,7 @@ impl ImmixSpace {
Err(_) => panic!("failed to call mmap"), Err(_) => panic!("failed to call mmap"),
}; };
let start : Address = Address::from_ptr::<u8>(anon_mmap.ptr()).align_up(SPACE_ALIGN); let start : Address = Address::from_ptr::<u8>(anon_mmap.ptr()).align_up(SPACE_ALIGN);
let end : Address = start.plus(space_size); let end : Address = start + space_size;
let line_mark_table = LineMarkTable::new(start, end); let line_mark_table = LineMarkTable::new(start, end);
...@@ -204,7 +204,7 @@ impl ImmixSpace { ...@@ -204,7 +204,7 @@ impl ImmixSpace {
let mut usable_blocks_lock = self.usable_blocks.lock().unwrap(); let mut usable_blocks_lock = self.usable_blocks.lock().unwrap();