WARNING! Access to this system is limited to authorised users only.
Unauthorised users may be subject to prosecution.
Unauthorised access to this system is a criminal offence under Australian law (Federal Crimes Act 1914 Part VIA)
It is a criminal offence to:
(1) Obtain access to data without authority. -Penalty 2 years imprisonment.
(2) Damage, delete, alter or insert data without authority. -Penalty 10 years imprisonment.
User activity is monitored and recorded. Anyone using this system expressly consents to such monitoring and recording.

To protect your data, the CISO has recommended that all users enable 2FA as soon as possible.
Currently 2.7% of users enabled 2FA.

Commit dc48e635 authored by Javad Ebrahimian Amiri's avatar Javad Ebrahimian Amiri
Browse files

going to reapply changes, to get rid of broken ones!

parent 4b4b4dfc
......@@ -68,7 +68,7 @@ impl Allocator for ImmixAllocator {
#[inline(always)]
fn alloc(&mut self, size: usize, align: usize) -> Address {
trace!("immix_mutator::alloc(self, {}, {});", size, align);
trace!("immix_mutator::alloc({}, {}, {});", &self, size, align);
// this part of code will slow down allocation
let align = objectmodel::check_alignment(align);
// end
......@@ -129,7 +129,7 @@ impl ImmixAllocator {
#[inline(never)]
pub fn alloc_slow(&mut self, size: usize, align: usize) -> Address {
trace!("immix_mutator::alloc_slow(self, {}, {});", size, align);
trace!("immix_mutator::alloc_slow({}, {}, {});", &self, size, align);
if size > BYTES_IN_LINE {
trace_if!(TRACE_ALLOC, "Mutator: overflow alloc()");
self.overflow_alloc(size, align)
......@@ -173,6 +173,7 @@ impl ImmixAllocator {
#[inline(always)]
pub fn init_object<T>(&mut self, addr: Address, encode: T) {
trace!("init_object({}, _)", addr);
let map_slot = ImmixSpace::get_type_byte_slot_static(addr);
unsafe {
map_slot.store(encode);
......@@ -185,7 +186,8 @@ impl ImmixAllocator {
align: usize
) -> Address {
trace!(
"immix_mutator::try_alloc_from_local(self, {}, {});",
"immix_mutator::try_alloc_from_local({}, {}, {});",
&self,
size,
align
);
......@@ -238,7 +240,8 @@ impl ImmixAllocator {
request_large: bool
) -> Address {
trace!(
"immix_mutator::alloc_from_global(self, {}, {}, {});",
"immix_mutator::alloc_from_global({}, {}, {}, {});",
&self,
size,
align,
request_large
......
......@@ -301,6 +301,7 @@ pub extern "C" fn remove_root(obj: ObjectReference) {
/// Pins an object so that it will *not* be moved or reclaimed by the GC
/// while pinned (the original comment said the opposite; compare the
/// `muentry_unpin_object` doc below, which releases the pin).
///
/// Returns the object's current address. Exported with C linkage so the
/// runtime/compiled code can call it directly.
#[no_mangle]
pub extern "C" fn muentry_pin_object(obj: ObjectReference) -> Address {
trace!("gc::src::lib::muentry_pin_object");
    // Pinning is implemented by registering the object as a GC root,
    // which keeps it alive and in place until removed again.
    add_to_root(obj);
    obj.to_address()
}
......@@ -308,6 +309,7 @@ pub extern "C" fn muentry_pin_object(obj: ObjectReference) -> Address {
/// Unpins an object so that it can be freely moved/reclaimed like any
/// normal object, by dropping it from the GC root set.
#[no_mangle]
pub extern "C" fn muentry_unpin_object(obj: Address) {
trace!("gc::src::lib::muentry_unpin_object");
    // SAFETY(review): assumes `obj` is the address of a live, previously
    // pinned object — the pin/unpin callers are expected to guarantee this.
    let obj_ref = unsafe { obj.to_object_reference() };
    remove_root(obj_ref);
}
......@@ -339,6 +341,7 @@ pub extern "C" fn muentry_alloc_tiny(
align: usize
) -> ObjectReference {
let m = mutator_ref(mutator);
trace!("gc::src::lib::muentry_alloc_tiny({}, {})", size, align);
unsafe { m.tiny.alloc(size, align).to_object_reference() }
}
......@@ -349,6 +352,7 @@ pub extern "C" fn muentry_alloc_normal(
align: usize
) -> ObjectReference {
let m = mutator_ref(mutator);
trace!("gc::src::lib::muentry_alloc_normal({}, {})", size, align);
let res = m.normal.alloc(size, align);
m.normal.post_alloc(res, size);
unsafe { res.to_object_reference() }
......@@ -362,8 +366,8 @@ pub extern "C" fn muentry_alloc_tiny_slow(
size: usize,
align: usize
) -> Address {
trace!("gc::src::lib::muentry_alloc_tiny_slow");
let m = mutator_ref(mutator);
trace!("gc::src::lib::muentry_alloc_tiny_slow({}, {})", size, align);
m.tiny.alloc_slow(size, align)
}
......@@ -375,8 +379,8 @@ pub extern "C" fn muentry_alloc_normal_slow(
size: usize,
align: usize
) -> Address {
trace!("gc::src::lib::muentry_alloc_normal_slow");
let m = mutator_ref(mutator);
trace!("gc::src::lib::muentry_alloc_normal_slow({}, {})", size, align);
let res = m.normal.alloc_slow(size, align);
m.normal.post_alloc(res, size);
res
......@@ -391,6 +395,7 @@ pub extern "C" fn muentry_alloc_large(
align: usize
) -> ObjectReference {
let m = mutator_ref(mutator);
trace!("gc::src::lib::muentry_alloc_large({}, {})", size, align);
let res = m.lo.alloc(size, align);
unsafe { res.to_object_reference() }
}
......@@ -402,6 +407,7 @@ pub extern "C" fn muentry_init_tiny_object(
obj: ObjectReference,
encode: TinyObjectEncode
) {
trace!("gc::src::lib::muentry_init_tiny_object");
unsafe { &mut *mutator }
.tiny
.init_object(obj.to_address(), encode);
......@@ -414,6 +420,7 @@ pub extern "C" fn muentry_init_small_object(
obj: ObjectReference,
encode: SmallObjectEncode
) {
trace!("gc::src::lib::muentry_init_small_object");
unsafe { &mut *mutator }
.normal
.init_object(obj.to_address(), encode);
......@@ -426,6 +433,7 @@ pub extern "C" fn muentry_init_medium_object(
obj: ObjectReference,
encode: MediumObjectEncode
) {
trace!("gc::src::lib::muentry_init_medium_object");
unsafe { &mut *mutator }
.normal
.init_object(obj.to_address(), encode);
......@@ -437,6 +445,7 @@ pub extern "C" fn muentry_init_large_object(
obj: ObjectReference,
encode: LargeObjectEncode
) {
trace!("gc::src::lib::muentry_init_large_object");
unsafe { &mut *mutator }
.lo
.init_object(obj.to_address(), encode);
......
......@@ -3882,7 +3882,7 @@ impl<'lb, 'lvm> BundleLoader<'lb, 'lvm> {
"exc_clause is not implemented for EALLOC"
);
let impl_allocty = self.get_built_type(allocty);
let impl_rvtype = self.ensure_uptr(allocty);
let impl_rvtype = self.ensure_ref(allocty);
let impl_rv =
self.new_ssa(fcb, result_id, impl_rvtype).clone_value();
assert_ir!(!impl_allocty.is_hybrid());
......@@ -3910,7 +3910,7 @@ impl<'lb, 'lvm> BundleLoader<'lb, 'lvm> {
let impl_allocty = self.get_built_type(allocty);
let impl_lenty = self.get_built_type(lenty);
let impl_length = self.get_treenode(fcb, length);
let impl_rvtype = self.ensure_uptr(allocty);
let impl_rvtype = self.ensure_ref(allocty);
let impl_rv =
self.new_ssa(fcb, result_id, impl_rvtype).clone_value();
......@@ -3938,7 +3938,7 @@ impl<'lb, 'lvm> BundleLoader<'lb, 'lvm> {
// let impl_ptrty = self.ensure_type_rec(ptrty);
let ptrtype = self.ensure_type_rec(ptrty);
let ptr = self.get_treenode(fcb, ptr);
assert_ir!(ptrtype.is_ptr());
assert_ir!(ptrtype.is_ref());
assert_ir!(ptr.ty() == ptrtype);
Instruction {
......@@ -4005,7 +4005,7 @@ impl<'lb, 'lvm> BundleLoader<'lb, 'lvm> {
);
let reg = self.get_treenode(fcb, reg as usize);
let impl_allocty = self.get_built_type(allocty);
let impl_rvtype = self.ensure_uptr(allocty);
let impl_rvtype = self.ensure_ref(allocty);
let impl_rv =
self.new_ssa(fcb, result_id, impl_rvtype).clone_value();
......@@ -4036,7 +4036,7 @@ impl<'lb, 'lvm> BundleLoader<'lb, 'lvm> {
let impl_allocty = self.get_built_type(allocty);
let impl_lenty = self.get_built_type(lenty);
let impl_length = self.get_treenode(fcb, length);
let impl_rvtype = self.ensure_uptr(allocty);
let impl_rvtype = self.ensure_ref(allocty);
let impl_rv =
self.new_ssa(fcb, result_id, impl_rvtype).clone_value();
......
......@@ -305,7 +305,7 @@ def test_collision_detection():
start = time.time()
res = subprocess.call(
'sudo LD_LIBRARY_PATH=$PWD/emit:$LD_LIBRARY_PATH MU_LOG_LEVEL=debug %s' % exec_path, shell=True)
'sudo LD_LIBRARY_PATH=$PWD/emit:$LD_LIBRARY_PATH MU_LOG_LEVEL=trace %s' % exec_path, shell=True)
end = time.time()
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment