
Commit f83b014b authored by Javad Ebrahimian Amiri

The thesis-tested version (needs clean-up)

parent 3aef3b51
@@ -102,6 +102,10 @@ impl Instruction {
| AllocAUHybrid(_,_)
| NewReg(_)
| DeleteReg(_)
| Intrinsic_CollectReg(_)
| Intrinsic_FindReg(_)
| Intrinsic_GetAllocatedReg(_)
| Intrinsic_GetFreeReg(_)
| rAlloc(_, _)
| rAllocHybrid(_, _, _)
| eAlloc(_)
@@ -201,6 +205,7 @@ impl Instruction {
| AllocAUHybrid(_,_)
| NewReg(_)
| DeleteReg(_)
| Intrinsic_CollectReg(_)
| rAlloc(_, _)
| rAllocHybrid(_,_,_)
| eAlloc(_)
@@ -302,6 +307,9 @@ impl Instruction {
| AttrGetPriority(_)
| RandF(_,_)
| RandI(_,_)
| Intrinsic_GetAllocatedReg(_)
| Intrinsic_GetFreeReg(_)
| Intrinsic_FindReg(_)
| ThreadIssetCPU(_, _) => false,
// | AffinityEqual(_, _)
}
@@ -337,6 +345,10 @@ impl Instruction {
| AllocAUHybrid(_, _)
| NewReg(_)
| DeleteReg(_)
| Intrinsic_CollectReg(_)
| Intrinsic_FindReg(_)
| Intrinsic_GetAllocatedReg(_)
| Intrinsic_GetFreeReg(_)
| rAlloc(_, _)
| rAllocHybrid(_, _, _)
| eAlloc(_)
@@ -347,7 +359,7 @@ impl Instruction {
| NewFutex(_)
| DeleteFutex(_)
| LockFutex(_, _)
| UnlockFutex(_,_)
| UnlockFutex(_, _)
| CondVarNew
| CondVarDelete(_)
| CondVarAcqLock(_)
@@ -463,6 +475,10 @@ impl Instruction {
| AllocAUHybrid(_,_)
| NewReg(_)
| DeleteReg(_)
| Intrinsic_CollectReg(_)
| Intrinsic_FindReg(_)
| Intrinsic_GetAllocatedReg(_)
| Intrinsic_GetFreeReg(_)
| rAlloc(_, _)
| rAllocHybrid(_,_,_)
// | rAllocT(_)
@@ -782,6 +798,18 @@ impl Instruction {
&Instruction_::DeleteReg(regref) => {
format!("COMMINST @uvm.delete_region({})", ops[regref])
}
&Instruction_::Intrinsic_CollectReg(regref) => {
format!("INTRINSIC @uvm.collect_region({})", ops[regref])
}
&Instruction_::Intrinsic_FindReg(regref) => {
format!("INTRINSIC @uvm.find_region({})", ops[regref])
}
&Instruction_::Intrinsic_GetAllocatedReg(regref) => {
format!("INTRINSIC @uvm.get_allocated_region({})", ops[regref])
}
&Instruction_::Intrinsic_GetFreeReg(regref) => {
format!("INTRINSIC @uvm.get_free_region({})", ops[regref])
}
&Instruction_::AllocAU(ref ty) => {
format!("COMMINST @uvm.AllocAU({})", ty.id())
}
@@ -840,7 +868,9 @@ impl Instruction {
)
}
&Instruction_::NewFutex(init_val) => format!("NEWFUTEX {}", ops[init_val]),
&Instruction_::NewFutex(init_val) => {
format!("NEWFUTEX {}", ops[init_val])
}
&Instruction_::DeleteFutex(futexref) => {
format!("DELETEFUTEX {}", ops[futexref])
}
@@ -851,9 +881,7 @@ impl Instruction {
format!("UNLOCKFUTEX {}, {}", ops[futexref], ops[count])
}
&Instruction_::CondVarNew => {
format!("CONDVAR_NEW")
}
&Instruction_::CondVarNew => format!("CONDVAR_NEW"),
&Instruction_::CondVarDelete(addr) => {
format!("CONDVAR_DELETE {}", ops[addr])
}
@@ -1423,6 +1451,11 @@ pub enum Instruction_ {
/// args: regionref to the target region
DeleteReg(OpIndex),
Intrinsic_CollectReg(OpIndex),
Intrinsic_FindReg(OpIndex),
Intrinsic_GetFreeReg(OpIndex),
Intrinsic_GetAllocatedReg(OpIndex),
BindRegion(OpIndex),
UnbindRegion(OpIndex),
......
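For orientation, an illustrative toy (not part of the commit): each of these variants stores an OpIndex into the instruction's operand vector rather than the operand itself. That is why the bundle-loader code later in this diff builds e.g. `ops: vec![impl_reg]` with `v: Instruction_::Intrinsic_CollectReg(0)`, and the printer renders `ops[regref]`. A minimal self-contained model of the convention:

// Toy model of the OpIndex convention (stand-in types; the real
// Instruction/TreeNode types live in mu_ast).
type OpIndex = usize;

#[allow(non_camel_case_types)]
enum Instruction_ {
    Intrinsic_CollectReg(OpIndex)
}

struct Instruction {
    ops: Vec<String>, // stand-in for the operand vector (Vec<P<TreeNode>>)
    v: Instruction_
}

fn debug_str(inst: &Instruction) -> String {
    match inst.v {
        // The payload is an index into `ops`, not the operand itself.
        Instruction_::Intrinsic_CollectReg(regref) => {
            format!("INTRINSIC @uvm.collect_region({})", inst.ops[regref])
        }
    }
}

fn main() {
    let inst = Instruction {
        ops: vec!["%r0".to_string()],
        v: Instruction_::Intrinsic_CollectReg(0)
    };
    assert_eq!(debug_str(&inst), "INTRINSIC @uvm.collect_region(%r0)");
}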
@@ -708,6 +708,10 @@ pub fn estimate_insts_for_ir(inst: &Instruction) -> usize {
| NewHybrid(_, _)
| NewReg(_)
| DeleteReg(_)
| Intrinsic_CollectReg(_)
| Intrinsic_FindReg(_)
| Intrinsic_GetFreeReg(_)
| Intrinsic_GetAllocatedReg(_)
| rAlloc(_, _)
| rAllocHybrid(_, _, _)
| eAlloc(_)
@@ -726,7 +730,7 @@ pub fn estimate_insts_for_ir(inst: &Instruction) -> usize {
| CondVarBroadcast(_)
| DeleteFutex(_)
| LockFutex(_, _)
| UnlockFutex(_,_)
| UnlockFutex(_, _)
| NotifyThread(_)
| ThreadSetPriority(_, _)
| ThreadGetPriority(_)
......
@@ -83,6 +83,10 @@ fn is_suitable_child(inst: &Instruction) -> bool {
| AllocAUHybrid(_, _)
| NewReg(_)
| DeleteReg(_)
| Intrinsic_CollectReg(_)
| Intrinsic_FindReg(_)
| Intrinsic_GetFreeReg(_)
| Intrinsic_GetAllocatedReg(_)
| rAlloc(_, _)
| rAllocHybrid(_, _, _)
| eAlloc(_)
@@ -94,7 +98,7 @@ fn is_suitable_child(inst: &Instruction) -> bool {
| NewFutex(_)
| DeleteFutex(_)
| LockFutex(_, _)
| UnlockFutex(_,_)
| UnlockFutex(_, _)
| CondVarNew
| CondVarDelete(_)
| CondVarAcqLock(_)
......
@@ -38,6 +38,7 @@ pub extern crate mu_ast as ast;
#[macro_use]
pub extern crate mu_utils as utils;
extern crate core;
extern crate memsec;
pub extern crate mu_gc as gc;
extern crate proc_macro;
extern crate rand;
......
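The new memsec dependency supplies libsodium-style memory routines; later in this diff the runtime calls its memzero (in the region allocator) and imports memset (referenced from the commented-out wipe in collect_all). A hedged standalone sketch of those two calls, assuming the 0.x-era signatures this code uses:

// Hedged sketch of the memsec calls used by this commit (assumed 0.x API:
// memzero(*mut u8, usize) and memset(*mut u8, i32, usize)).
extern crate memsec;

fn main() {
    let mut buf = [0xAAu8; 16];
    // memzero: zeroing intended to survive compiler optimization.
    unsafe { memsec::memzero(buf.as_mut_ptr(), buf.len()) };
    assert!(buf.iter().all(|&b| b == 0));
    // memset: libc-style fill with an arbitrary byte value.
    unsafe { memsec::memset(buf.as_mut_ptr(), 0x5A, buf.len()) };
    assert!(buf.iter().all(|&b| b == 0x5A));
}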
@@ -23,3 +23,5 @@ pub const MM_RTMU_INFO: bool = true;
pub const MM_MU_TRACE: bool = false;
pub const MM_MU_DEBUG: bool = false;
pub const MM_MU_INFO: bool = false;
pub const IRBLDR_TRACE: bool = true;
@@ -168,6 +168,30 @@ lazy_static! {
vec![ADDRESS_TYPE.clone()], // (region_ref)
vec![] // returns nothing
);
/// Resets an EMM region (rolls its allocation pointer back to the base)
pub static ref COLLECT_REG: RuntimeEntrypoint = RuntimeEntrypoint::new(
"muentry_collect_reg",
vec![ADDRESS_TYPE.clone()], // (region_ref)
vec![] // returns nothing
);
/// Returns the regionref for an object, or null if the object is not in any region
pub static ref FIND_REG: RuntimeEntrypoint = RuntimeEntrypoint::new(
"muentry_find_reg",
vec![ADDRESS_TYPE.clone()], // (ref<T>)
vec![ADDRESS_TYPE.clone()] // returns regionref
);
/// Returns the number of free bytes in an EMM region
pub static ref GET_FREE_REG: RuntimeEntrypoint = RuntimeEntrypoint::new(
"muentry_get_free_reg",
vec![ADDRESS_TYPE.clone()], // (region_ref)
vec![UINT64_TYPE.clone()] // returns usize
);
/// Returns the number of allocated bytes in an EMM region
pub static ref GET_ALLOCATED_REG: RuntimeEntrypoint = RuntimeEntrypoint::new(
"muentry_get_allocated_reg",
vec![ADDRESS_TYPE.clone()], // (region_ref)
vec![UINT64_TYPE.clone()] // returns usize
);
/// Untraced rAlloc for non-hybrid types
pub static ref RALLOC: RuntimeEntrypoint = RuntimeEntrypoint::new(
"muentry_ralloc",
......
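Each RuntimeEntrypoint above binds a symbol name to Mu-level argument and return types; the matching `#[no_mangle] pub extern "C"` definitions (`muentry_collect_reg`, `muentry_find_reg`, `muentry_get_free_reg`, `muentry_get_allocated_reg`) appear in the runtime changes further down this diff, and the two sides must agree on signatures.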
@@ -16,6 +16,7 @@ use std::collections::HashMap;
use std::collections::HashSet;
use std::sync::RwLock;
use utils::mem::memsec::memzero;
//use super::super::super::log_settings;
use super::mm_rtmu_std::*;
use super::*;
@@ -25,6 +26,8 @@ lazy_static! {
RwLock::new(HashSet::new());
pub static ref EMM_MAP: RwLock<HashMap<usize, usize>> =
RwLock::new(HashMap::new());
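/// (base, size) pairs for every live EMM region; scanned linearly by find_obj_region.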
pub static ref REGION_LIMITS: RwLock<Vec<(Address, usize)>> =
RwLock::new(Vec::new());
}
impl fmt::Debug for EMM_ROOTS {
@@ -96,7 +99,10 @@ impl RegionRootSet {
}
}
use ast::op::BinOp::Add;
use memsec::memset;
use std::fmt;
impl fmt::Debug for RegionRootSet {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let lock = self.refs.read().unwrap();
@@ -230,6 +236,21 @@ impl EMMRegion {
emm_region
}
pub fn collect_all(&mut self) {
// Collecting an EMM region just rolls the bump pointer back to the base
// of the backing store; the memory itself is not cleared (the memset
// below would additionally zero it, but is currently disabled).
self.freeptr = Address::from_mut_ptr(self.emmbackstore.sysbackstore);
// unsafe { memset(self.emmbackstore.sysbackstore, 0 as i32, self.size) };
}
// Bump-pointer accounting: get_free_space() + get_allocated_space() == size.
pub fn get_free_space(&mut self) -> usize {
self.size - (self.freeptr - self.emmbackstore.get_base_addr())
}
pub fn get_allocated_space(&mut self) -> usize {
self.freeptr - self.emmbackstore.get_base_addr()
}
/// Deletes the backstore memory allocated as part of the `new(_size)` method.
///
/// Note that the `EMMRegion` object itself should be deleted separately.
@@ -261,6 +282,7 @@ impl EMMRegion {
// "RALLOC: allocating {} bytes",
// _size
// );
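// Zero the freshly allocated block before handing it out (memsec::memzero).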
unsafe { memzero(self.freeptr.to_ptr_mut::<u8>(), _size) };
self.freeptr += _size as ByteOffset;
res
} else {
@@ -290,22 +312,114 @@ impl EMMRegion {
}
}
// Registers a region's [base, base + size) interval for later lookups.
fn add_region_limits(addr: Address, size: usize) {
let mut wl = REGION_LIMITS.write().unwrap();
wl.push((addr, size));
}
// Drops the interval registered for `addr`, if any.
fn remove_region_limits(addr: Address) {
let mut wl = REGION_LIMITS.write().unwrap();
if let Some(i) = wl.iter().position(|&(base, _)| base == addr) {
wl.remove(i);
}
}
// Linear scan over the registered (base, size) intervals; returns the
// base address of the region containing `obj_addr`, or a null Address
// if the object lies in no registered region.
fn find_obj_region(obj_addr: Address) -> Address {
let rl = REGION_LIMITS.read().unwrap();
for &(base, size) in rl.iter() {
if base <= obj_addr && obj_addr < base + size {
info_if!(
log_settings::MM_RTMU_INFO,
"find_obj_region.returning({})",
base
);
return base;
}
}
let res = unsafe { Address::from_usize(0) };
info_if!(
log_settings::MM_RTMU_INFO,
"find_obj_region.returning({})",
res
);
res
}
#[no_mangle]
pub extern "C" fn muentry_new_reg(size: usize) -> Address {
info_if!(log_settings::MM_RTMU_INFO, "=== NEW REGION ===");
let new_region = Box::new(EMMRegion::new(size));
info_if!(log_settings::MM_RTMU_INFO, "- {:?}", new_region);
Address::from_ptr(Box::into_raw(new_region))
let res = Address::from_ptr(Box::into_raw(new_region));
add_region_limits(res, size);
res
}
#[no_mangle]
pub extern "C" fn muentry_delete_reg(regionref: Address) {
remove_region_limits(regionref);
info_if!(log_settings::MM_RTMU_INFO, "=== DELETE REGION ===");
let mut regionbox =
unsafe { Box::from_raw(regionref.to_ptr_mut() as *mut EMMRegion) };
regionbox.delete_backstore();
}
#[no_mangle]
pub extern "C" fn muentry_collect_reg(regionref: Address) {
info_if!(log_settings::MM_RTMU_INFO, "=== COLLECT REGION ===");
let mut regionbox =
unsafe { Box::from_raw(regionref.to_ptr_mut() as *mut EMMRegion) };
regionbox.collect_all();
// Leak the box back into a raw pointer so the region is not dropped here;
// ownership stays with the caller's regionref.
let _ = Box::into_raw(regionbox);
}
#[no_mangle]
pub extern "C" fn muentry_find_reg(obj_ref: Address) -> Address {
info_if!(
log_settings::MM_RTMU_INFO,
"=== FIND REGION ({:#?})===",
obj_ref
);
find_obj_region(obj_ref)
}
#[no_mangle]
pub extern "C" fn muentry_get_free_reg(regionref: Address) -> usize {
info_if!(log_settings::MM_RTMU_INFO, "=== GET FREE REGION ===");
let mut regionbox =
unsafe { Box::from_raw(regionref.to_ptr_mut() as *mut EMMRegion) };
let res = regionbox.get_free_space();
let _ = Box::into_raw(regionbox); // leak back: caller keeps ownership
res
}
#[no_mangle]
pub extern "C" fn muentry_get_allocated_reg(regionref: Address) -> usize {
info_if!(log_settings::MM_RTMU_INFO, "=== GET ALLOCATED REGION ===");
let mut regionbox =
unsafe { Box::from_raw(regionref.to_ptr_mut() as *mut EMMRegion) };
let res = regionbox.get_allocated_space();
let _ = Box::into_raw(regionbox); // leak back: caller keeps ownership
res
}
/// Given the address of a region struct (`EMMRegion`),
/// allocates a block of `size` bytes
/// (possibly more, to preserve alignment)
......
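Taken together, the muentry_* functions above form a small region lifecycle API. The following usage sketch is not part of the commit: it re-declares the symbols over the C ABI with usize standing in for Address (an assumption about its representation), assumes the runtime library is linked in, and assumes a fresh region reports its full size as free:

// Hedged usage sketch for the region entrypoints added in this commit.
// Assumptions: the mu runtime is linked in, Address is pointer-sized
// (so usize works as a stand-in), and new regions start empty.
extern "C" {
    fn muentry_new_reg(size: usize) -> usize;
    fn muentry_get_free_reg(regionref: usize) -> usize;
    fn muentry_get_allocated_reg(regionref: usize) -> usize;
    fn muentry_collect_reg(regionref: usize);
    fn muentry_delete_reg(regionref: usize);
}

fn main() {
    unsafe {
        let region = muentry_new_reg(4096);
        // Fresh region: free + allocated == size.
        assert_eq!(muentry_get_allocated_reg(region), 0);
        assert_eq!(muentry_get_free_reg(region), 4096);
        // ...allocations via muentry_ralloc would advance the bump pointer...
        // collect_reg rolls the bump pointer back to the base.
        muentry_collect_reg(region);
        assert_eq!(muentry_get_allocated_reg(region), 0);
        muentry_delete_reg(region);
    }
}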
This diff is collapsed.
@@ -555,6 +555,10 @@ pub const CMU_CI_UVM_IRBUILDER_EMM_UNBINDOBJECT: CMuCommInst = 0x403;
pub const CMU_CI_UVM_IRBUILDER_REGIONS_NEWREGIONPA: CMuCommInst = 0x408;
pub const CMU_CI_UVM_IRBUILDER_REGIONS_BINDREGION: CMuCommInst = 0x409;
pub const CMU_CI_UVM_IRBUILDER_REGIONS_UNBINDREGION: CMuCommInst = 0x40a;
pub const CMU_CI_UVM_IRBUILDER_REGIONS_COLLECTREGION: CMuCommInst = 0x40b;
pub const CMU_CI_UVM_IRBUILDER_REGIONS_GETFREE: CMuCommInst = 0x40c;
pub const CMU_CI_UVM_IRBUILDER_REGIONS_GETALLOCATED: CMuCommInst = 0x40d;
pub const CMU_CI_UVM_IRBUILDER_REGIONS_FIND: CMuCommInst = 0x40e;
pub const CMU_CI_UVM_IRBUILDER_SCHED_FIFO: CMuCommInst = 0x410;
pub const CMU_CI_UVM_IRBUILDER_SCHED_RM: CMuCommInst = 0x411;
pub const CMU_CI_UVM_IRBUILDER_SCHED_EDF: CMuCommInst = 0x412;
......
@@ -1577,9 +1577,21 @@ impl MuIRBuilder {
self.add_inst(id, NodeInst::NodeYield { id, exc_clause });
}
pub fn new_newfutex(&mut self, id: MuID, init_val: MuVarNode, result_id: MuID) {
pub fn new_newfutex(
&mut self,
id: MuID,
init_val: MuVarNode,
result_id: MuID
) {
trace!("new_newfutex");
self.add_inst(id, NodeInst::NodeNewFutex { id, init_val, result_id });
self.add_inst(
id,
NodeInst::NodeNewFutex {
id,
init_val,
result_id
}
);
}
pub fn new_newattr(&mut self, id: MuID, result_id: MuID) {
@@ -2664,7 +2676,7 @@ impl<'lb, 'lvm> BundleLoader<'lb, 'lvm> {
NodeType::TypeTimerRef { id: _ } => MuType_::TimerRef,
#[cfg(feature = "realtime")]
NodeType::TypeFutexRef { id: _ } => MuType_::FutexRef,
NodeType::TypeCondVarRef {id: _ } => MuType_::CondVarRef,
NodeType::TypeCondVarRef { id: _ } => MuType_::CondVarRef,
ref t => panic!("{:?} not implemented", t)
};
@@ -4677,7 +4689,11 @@ impl<'lb, 'lvm> BundleLoader<'lb, 'lvm> {
unimplemented!();
}
NodeInst::NodeNewFutex { id, init_val, result_id } => {
NodeInst::NodeNewFutex {
id,
init_val,
result_id
} => {
let iv = self.get_treenode(fcb, init_val);
let futexref_t = self.ensure_futexref();
let rv = self.new_ssa(fcb, result_id, futexref_t).clone_value();
@@ -4961,7 +4977,12 @@ impl<'lb, 'lvm> BundleLoader<'lb, 'lvm> {
// }
let my_index = ops.len();
let op = self.add_opnd(fcb, ops, *vid);
assert_ir!(op.ty() == arg.ty, "op.ty() is -{:#?}- but arg.ty is -{:#?}", op.ty(), arg.ty);
assert_ir!(
op.ty() == arg.ty,
"op.ty() is -{:#?}- but arg.ty is -{:#?}",
op.ty(),
arg.ty
);
DestArg::Normal(my_index)
})
.collect::<Vec<_>>();
@@ -5042,6 +5063,15 @@ impl<'lb, 'lvm> BundleLoader<'lb, 'lvm> {
is_ccall: bool,
call_conv: CallConvention
) -> CallData {
use log_settings::IRBLDR_TRACE;
trace_if!(
IRBLDR_TRACE,
"build_call_data(fid: {}, \n\t args_id: \n\t\t{:#?})",
callee,
args
);
let func_index = ops.len();
let callee = self.add_opnd(fcb, ops, callee);
@@ -5860,6 +5890,87 @@ impl<'lb, 'lvm> BundleLoader<'lb, 'lvm> {
v: Instruction_::CondVarBroadcast(0)
}
}
CMU_CI_UVM_IRBUILDER_REGIONS_COLLECTREGION => {
assert_ir!(
tys.is_empty()
&& flags.is_empty()
&& exc_clause.is_none()
&& keepalives.is_none()
);
assert_eq!(args.len(), 1);
let impl_reg = self.get_treenode(fcb, args[0]);
Instruction {
hdr: hdr,
value: None,
ops: vec![impl_reg],
v: Instruction_::Intrinsic_CollectReg(0)
}
}
CMU_CI_UVM_IRBUILDER_REGIONS_FIND => {
assert_ir!(
tys.is_empty()
&& flags.is_empty()
&& exc_clause.is_none()
&& keepalives.is_none()
);
assert_eq!(args.len(), 1);
let impl_reg = self.get_treenode(fcb, args[0]);
let impl_p = self.ensure_regionref();
let impl_rv =
self.new_ssa(fcb, result_ids[0], impl_p).clone_value();
Instruction {
hdr: hdr,
value: Some(vec![impl_rv]),
ops: vec![impl_reg],
v: Instruction_::Intrinsic_FindReg(0)
}
}
CMU_CI_UVM_IRBUILDER_REGIONS_GETALLOCATED => {
assert_ir!(
tys.is_empty()
&& flags.is_empty()
&& exc_clause.is_none()
&& keepalives.is_none()
);
assert_eq!(args.len(), 1);
let impl_reg = self.get_treenode(fcb, args[0]);
let impl_p = self.ensure_i64();
let impl_rv =
self.new_ssa(fcb, result_ids[0], impl_p).clone_value();
Instruction {
hdr: hdr,
value: Some(vec![impl_rv]),
ops: vec![impl_reg],
v: Instruction_::Intrinsic_GetAllocatedReg(0)
}
}
CMU_CI_UVM_IRBUILDER_REGIONS_GETFREE => {
assert_ir!(
tys.is_empty()
&& flags.is_empty()
&& exc_clause.is_none()
&& keepalives.is_none()
);
assert_eq!(args.len(), 1);
let impl_reg = self.get_treenode(fcb, args[0]);
let impl_p = self.ensure_i64();
let impl_rv =
self.new_ssa(fcb, result_ids[0], impl_p).clone_value();
Instruction {
hdr: hdr,
value: Some(vec![impl_rv]),
ops: vec![impl_reg],
v: Instruction_::Intrinsic_GetFreeReg(0)
}
}
_ => unimplemented!()
}
}
......
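The four CMU_CI_UVM_IRBUILDER_REGIONS_* arms above share one skeleton (assert no types, flags, exception clause, or keepalives; take a single region operand) and differ only in the Instruction_ variant and the result type: none for collect, regionref for find, i64 for the two size queries. Since the commit message flags a clean-up pass, a hypothetical direction (toy stand-ins, not the real BundleLoader API) would table-drive that difference:

// Toy sketch of factoring the shared handler shape (hypothetical).
#[derive(Clone, Copy)]
enum RegionIntrinsic { CollectReg, FindReg, GetAllocatedReg, GetFreeReg }

#[derive(Debug, PartialEq)]
enum ResultTy { None, RegionRef, I64 }

// The only per-intrinsic variation: the type of the produced SSA value.
fn result_ty(which: RegionIntrinsic) -> ResultTy {
    match which {
        RegionIntrinsic::CollectReg => ResultTy::None,
        RegionIntrinsic::FindReg => ResultTy::RegionRef,
        RegionIntrinsic::GetAllocatedReg
        | RegionIntrinsic::GetFreeReg => ResultTy::I64
    }
}

fn main() {
    assert_eq!(result_ty(RegionIntrinsic::FindReg), ResultTy::RegionRef);
    assert_eq!(result_ty(RegionIntrinsic::GetFreeReg), ResultTy::I64);
}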
@@ -1051,6 +1051,10 @@ struct MuIRBuilder {
#define MU_CI_UVM_IRBUILDER_REGIONS_NEWREGIONPA ((MuCommInst)0x408) /// MUAPIPARSER muname:@uvm.irbuilder.regions.newregionpa
#define MU_CI_UVM_IRBUILDER_REGIONS_BINDREGION ((MuCommInst)0x409) /// MUAPIPARSER muname:@uvm.irbuilder.regions.bindregion
#define MU_CI_UVM_IRBUILDER_REGIONS_UNBINDREGION ((MuCommInst)0x40a) /// MUAPIPARSER muname:@uvm.irbuilder.region.unbindregion
#define MU_CI_UVM_IRBUILDER_REGIONS_COLLECTREGION ((MuCommInst)0x40b) /// MUAPIPARSER muname:@uvm.irbuilder.region.collectregion
#define MU_CI_UVM_IRBUILDER_REGIONS_GETFREE ((MuCommInst)0x40c) /// MUAPIPARSER muname:@uvm.irbuilder.region.getfree
#define MU_CI_UVM_IRBUILDER_REGIONS_GETALLOCATED ((MuCommInst)0x40d) /// MUAPIPARSER muname:@uvm.irbuilder.region.getallocated
#define MU_CI_UVM_IRBUILDER_REGIONS_FIND ((MuCommInst)0x40e) /// MUAPIPARSER muname:@uvm.irbuilder.region.find
#define MU_CI_UVM_IRBUILDER_SCHED_FIFO ((MuCommInst)0x410) /// MUAPIPARSER muname:@uvm.irbuilder.sched.fifo
#define MU_CI_UVM_IRBUILDER_SCHED_RM ((MuCommInst)0x411) /// MUAPIPARSER muname:@uvm.irbuilder.sched.rm
#define MU_CI_UVM_IRBUILDER_SCHED_EDF ((MuCommInst)0x412) /// MUAPIPARSER muname:@uvm.irbuilder.sched.edf
......
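These four MuCommInst defines (0x40b-0x40e) mirror the CMU_CI_UVM_IRBUILDER_REGIONS_* constants added on the Rust side earlier in this diff; the two lists must stay in sync for the API parser and the bundle loader to agree on opcode numbering.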
@@ -26,7 +26,7 @@ import pytest
import time
logger = AnsiLogger('rt_rpython_tests')
cur_dir = py.path.local()
# --------------------------
# tests
# @may_spawn_proc
@@ -449,6 +449,36 @@ def test_bytearray():
    assert res == 101
@may_spawn_proc
def test_forloop():
    from rpython.dev.dev_for import test_for
    logger.info('going to build the test function')
    exec_path = executable_from_rpy_func(test_for, [], rffi.CHAR)
    logger.info('going to run the test function')
    import subprocess
    start = time.time()
    res = subprocess.call(
        'sudo LD_LIBRARY_PATH=$PWD/emit:$LD_LIBRARY_PATH '
        'MU_LOG_LEVEL=info %s' % exec_path,
        shell=True)
    end = time.time()
    logger.info('test took %.3f seconds' % (end - start))
    logger.info('returned res = %d' % res)
    assert res == 101
@may_spawn_proc
def test_collision_detection():
    from rpython.dev.dev_CD.main import Main
@@ -480,6 +510,51 @@ def test_collision_detection():
    assert 0 <= res <= 50
@may_spawn_proc
def test_collision_detection_100times():
    from rpython.dev.dev_CD.main import Main
    logger.info('going to build the test function')
    exec_path = executable_from_rpy_func(Main.main, [rffi.CCHARP],