WARNING! Access to this system is limited to authorised users only.
Unauthorised users may be subject to prosecution.
Unauthorised access to this system is a criminal offence under Australian law (Federal Crimes Act 1914 Part VIA)
It is a criminal offence to:
(1) Obtain access to data without authority. -Penalty 2 years imprisonment.
(2) Damage, delete, alter or insert data without authority. -Penalty 10 years imprisonment.
User activity is monitored and recorded. Anyone using this system expressly consents to such monitoring and recording.

To protect your data, the CISO has suggested that users enable 2FA as soon as possible.
Currently 2.7% of users enabled 2FA.

Commit f83b014b authored by Javad Ebrahimian Amiri
Browse files

The thesis tested version (needs clean-up)

parent 3aef3b51
...@@ -102,6 +102,10 @@ impl Instruction { ...@@ -102,6 +102,10 @@ impl Instruction {
| AllocAUHybrid(_,_) | AllocAUHybrid(_,_)
| NewReg(_) | NewReg(_)
| DeleteReg(_) | DeleteReg(_)
| Intrinsic_CollectReg(_)
| Intrinsic_FindReg(_)
| Intrinsic_GetAllocatedReg(_)
| Intrinsic_GetFreeReg(_)
| rAlloc(_, _) | rAlloc(_, _)
| rAllocHybrid(_, _, _) | rAllocHybrid(_, _, _)
| eAlloc(_) | eAlloc(_)
...@@ -201,6 +205,7 @@ impl Instruction { ...@@ -201,6 +205,7 @@ impl Instruction {
| AllocAUHybrid(_,_) | AllocAUHybrid(_,_)
| NewReg(_) | NewReg(_)
| DeleteReg(_) | DeleteReg(_)
| Intrinsic_CollectReg(_)
| rAlloc(_, _) | rAlloc(_, _)
| rAllocHybrid(_,_,_) | rAllocHybrid(_,_,_)
| eAlloc(_) | eAlloc(_)
...@@ -302,6 +307,9 @@ impl Instruction { ...@@ -302,6 +307,9 @@ impl Instruction {
| AttrGetPriority(_) | AttrGetPriority(_)
| RandF(_,_) | RandF(_,_)
| RandI(_,_) | RandI(_,_)
| Intrinsic_GetAllocatedReg(_)
| Intrinsic_GetFreeReg(_)
| Intrinsic_FindReg(_)
| ThreadIssetCPU(_, _) => false, | ThreadIssetCPU(_, _) => false,
// | AffinityEqual(_, _) // | AffinityEqual(_, _)
} }
...@@ -337,6 +345,10 @@ impl Instruction { ...@@ -337,6 +345,10 @@ impl Instruction {
| AllocAUHybrid(_, _) | AllocAUHybrid(_, _)
| NewReg(_) | NewReg(_)
| DeleteReg(_) | DeleteReg(_)
| Intrinsic_CollectReg(_)
| Intrinsic_FindReg(_)
| Intrinsic_GetAllocatedReg(_)
| Intrinsic_GetFreeReg(_)
| rAlloc(_, _) | rAlloc(_, _)
| rAllocHybrid(_, _, _) | rAllocHybrid(_, _, _)
| eAlloc(_) | eAlloc(_)
...@@ -347,7 +359,7 @@ impl Instruction { ...@@ -347,7 +359,7 @@ impl Instruction {
| NewFutex(_) | NewFutex(_)
| DeleteFutex(_) | DeleteFutex(_)
| LockFutex(_, _) | LockFutex(_, _)
| UnlockFutex(_,_) | UnlockFutex(_, _)
| CondVarNew | CondVarNew
| CondVarDelete(_) | CondVarDelete(_)
| CondVarAcqLock(_) | CondVarAcqLock(_)
...@@ -463,6 +475,10 @@ impl Instruction { ...@@ -463,6 +475,10 @@ impl Instruction {
| AllocAUHybrid(_,_) | AllocAUHybrid(_,_)
| NewReg(_) | NewReg(_)
| DeleteReg(_) | DeleteReg(_)
| Intrinsic_CollectReg(_)
| Intrinsic_FindReg(_)
| Intrinsic_GetAllocatedReg(_)
| Intrinsic_GetFreeReg(_)
| rAlloc(_, _) | rAlloc(_, _)
| rAllocHybrid(_,_,_) | rAllocHybrid(_,_,_)
// | rAllocT(_) // | rAllocT(_)
...@@ -782,6 +798,18 @@ impl Instruction { ...@@ -782,6 +798,18 @@ impl Instruction {
&Instruction_::DeleteReg(regref) => { &Instruction_::DeleteReg(regref) => {
format!("COMMINST @uvm.delete_region({})", ops[regref]) format!("COMMINST @uvm.delete_region({})", ops[regref])
} }
&Instruction_::Intrinsic_CollectReg(regref) => {
format!("INTRINSIC @uvm.collect_region({})", ops[regref])
}
&Instruction_::Intrinsic_FindReg(regref) => {
format!("INTRINSIC @uvm.find_region({})", ops[regref])
}
&Instruction_::Intrinsic_GetAllocatedReg(regref) => {
format!("INTRINSIC @uvm.get_allocated_region({})", ops[regref])
}
&Instruction_::Intrinsic_GetFreeReg(regref) => {
format!("INTRINSIC @uvm.get_free_region({})", ops[regref])
}
&Instruction_::AllocAU(ref ty) => { &Instruction_::AllocAU(ref ty) => {
format!("COMMINST @uvm.AllocAU({})", ty.id()) format!("COMMINST @uvm.AllocAU({})", ty.id())
} }
...@@ -840,7 +868,9 @@ impl Instruction { ...@@ -840,7 +868,9 @@ impl Instruction {
) )
} }
&Instruction_::NewFutex(init_val) => format!("NEWFUTEX {}", ops[init_val]), &Instruction_::NewFutex(init_val) => {
format!("NEWFUTEX {}", ops[init_val])
}
&Instruction_::DeleteFutex(futexref) => { &Instruction_::DeleteFutex(futexref) => {
format!("DELETEFUTEX {}", ops[futexref]) format!("DELETEFUTEX {}", ops[futexref])
} }
...@@ -851,9 +881,7 @@ impl Instruction { ...@@ -851,9 +881,7 @@ impl Instruction {
format!("UNLOCKFUTEX {}, {}", ops[futexref], ops[count]) format!("UNLOCKFUTEX {}, {}", ops[futexref], ops[count])
} }
&Instruction_::CondVarNew => { &Instruction_::CondVarNew => format!("CONDVAR_NEW"),
format!("CONDVAR_NEW")
}
&Instruction_::CondVarDelete(addr) => { &Instruction_::CondVarDelete(addr) => {
format!("CONDVAR_DELETE {}", ops[addr]) format!("CONDVAR_DELETE {}", ops[addr])
} }
...@@ -1423,6 +1451,11 @@ pub enum Instruction_ { ...@@ -1423,6 +1451,11 @@ pub enum Instruction_ {
/// args: regionref to the target region /// args: regionref to the target region
DeleteReg(OpIndex), DeleteReg(OpIndex),
Intrinsic_CollectReg(OpIndex),
Intrinsic_FindReg(OpIndex),
Intrinsic_GetFreeReg(OpIndex),
Intrinsic_GetAllocatedReg(OpIndex),
BindRegion(OpIndex), BindRegion(OpIndex),
UnbindRegion(OpIndex), UnbindRegion(OpIndex),
......
...@@ -708,6 +708,10 @@ pub fn estimate_insts_for_ir(inst: &Instruction) -> usize { ...@@ -708,6 +708,10 @@ pub fn estimate_insts_for_ir(inst: &Instruction) -> usize {
| NewHybrid(_, _) | NewHybrid(_, _)
| NewReg(_) | NewReg(_)
| DeleteReg(_) | DeleteReg(_)
| Intrinsic_CollectReg(_)
| Intrinsic_FindReg(_)
| Intrinsic_GetFreeReg(_)
| Intrinsic_GetAllocatedReg(_)
| rAlloc(_, _) | rAlloc(_, _)
| rAllocHybrid(_, _, _) | rAllocHybrid(_, _, _)
| eAlloc(_) | eAlloc(_)
...@@ -726,7 +730,7 @@ pub fn estimate_insts_for_ir(inst: &Instruction) -> usize { ...@@ -726,7 +730,7 @@ pub fn estimate_insts_for_ir(inst: &Instruction) -> usize {
| CondVarBroadcast(_) | CondVarBroadcast(_)
| DeleteFutex(_) | DeleteFutex(_)
| LockFutex(_, _) | LockFutex(_, _)
| UnlockFutex(_,_) | UnlockFutex(_, _)
| NotifyThread(_) | NotifyThread(_)
| ThreadSetPriority(_, _) | ThreadSetPriority(_, _)
| ThreadGetPriority(_) | ThreadGetPriority(_)
......
...@@ -83,6 +83,10 @@ fn is_suitable_child(inst: &Instruction) -> bool { ...@@ -83,6 +83,10 @@ fn is_suitable_child(inst: &Instruction) -> bool {
| AllocAUHybrid(_, _) | AllocAUHybrid(_, _)
| NewReg(_) | NewReg(_)
| DeleteReg(_) | DeleteReg(_)
| Intrinsic_CollectReg(_)
| Intrinsic_FindReg(_)
| Intrinsic_GetFreeReg(_)
| Intrinsic_GetAllocatedReg(_)
| rAlloc(_, _) | rAlloc(_, _)
| rAllocHybrid(_, _, _) | rAllocHybrid(_, _, _)
| eAlloc(_) | eAlloc(_)
...@@ -94,7 +98,7 @@ fn is_suitable_child(inst: &Instruction) -> bool { ...@@ -94,7 +98,7 @@ fn is_suitable_child(inst: &Instruction) -> bool {
| NewFutex(_) | NewFutex(_)
| DeleteFutex(_) | DeleteFutex(_)
| LockFutex(_, _) | LockFutex(_, _)
| UnlockFutex(_,_) | UnlockFutex(_, _)
| CondVarNew | CondVarNew
| CondVarDelete(_) | CondVarDelete(_)
| CondVarAcqLock(_) | CondVarAcqLock(_)
......
...@@ -38,6 +38,7 @@ pub extern crate mu_ast as ast; ...@@ -38,6 +38,7 @@ pub extern crate mu_ast as ast;
#[macro_use] #[macro_use]
pub extern crate mu_utils as utils; pub extern crate mu_utils as utils;
extern crate core; extern crate core;
extern crate memsec;
pub extern crate mu_gc as gc; pub extern crate mu_gc as gc;
extern crate proc_macro; extern crate proc_macro;
extern crate rand; extern crate rand;
......
...@@ -23,3 +23,5 @@ pub const MM_RTMU_INFO: bool = true; ...@@ -23,3 +23,5 @@ pub const MM_RTMU_INFO: bool = true;
pub const MM_MU_TRACE: bool = false; pub const MM_MU_TRACE: bool = false;
pub const MM_MU_DEBUG: bool = false; pub const MM_MU_DEBUG: bool = false;
pub const MM_MU_INFO: bool = false; pub const MM_MU_INFO: bool = false;
pub const IRBLDR_TRACE: bool = true;
...@@ -168,6 +168,30 @@ lazy_static! { ...@@ -168,6 +168,30 @@ lazy_static! {
vec![ADDRESS_TYPE.clone()], // (region_ref) vec![ADDRESS_TYPE.clone()], // (region_ref)
vec![] // returns nothing vec![] // returns nothing
); );
/// Resets an EMM region
pub static ref COLLECT_REG: RuntimeEntrypoint = RuntimeEntrypoint::new(
"muentry_collect_reg",
vec![ADDRESS_TYPE.clone()], // (region_ref)
vec![] // returns nothing
);
/// Returns the regionref for an object, or null if the object is not in any region
pub static ref FIND_REG: RuntimeEntrypoint = RuntimeEntrypoint::new(
"muentry_find_reg",
vec![ADDRESS_TYPE.clone()], // (ref<T>)
vec![ADDRESS_TYPE.clone()] // returns regionref
);
/// Returns the number of free bytes in an EMM region
pub static ref GET_FREE_REG: RuntimeEntrypoint = RuntimeEntrypoint::new(
"muentry_get_free_reg",
vec![ADDRESS_TYPE.clone()], // (region_ref)
vec![UINT64_TYPE.clone()] // returns usize
);
/// Returns the number of allocated bytes in an EMM region
pub static ref GET_ALLOCATED_REG: RuntimeEntrypoint = RuntimeEntrypoint::new(
"muentry_get_allocated_reg",
vec![ADDRESS_TYPE.clone()], // (region_ref)
vec![UINT64_TYPE.clone()] // returns usize
);
/// Untraced rAlloc for non-hybrid types /// Untraced rAlloc for non-hybrid types
pub static ref RALLOC: RuntimeEntrypoint = RuntimeEntrypoint::new( pub static ref RALLOC: RuntimeEntrypoint = RuntimeEntrypoint::new(
"muentry_ralloc", "muentry_ralloc",
......
...@@ -16,6 +16,7 @@ use std::collections::HashMap; ...@@ -16,6 +16,7 @@ use std::collections::HashMap;
use std::collections::HashSet; use std::collections::HashSet;
use std::sync::RwLock; use std::sync::RwLock;
use utils::mem::memsec::memzero;
//use super::super::super::log_settings; //use super::super::super::log_settings;
use super::mm_rtmu_std::*; use super::mm_rtmu_std::*;
use super::*; use super::*;
...@@ -25,6 +26,8 @@ lazy_static! { ...@@ -25,6 +26,8 @@ lazy_static! {
RwLock::new(HashSet::new()); RwLock::new(HashSet::new());
pub static ref EMM_MAP: RwLock<HashMap<usize, usize>> = pub static ref EMM_MAP: RwLock<HashMap<usize, usize>> =
RwLock::new(HashMap::new()); RwLock::new(HashMap::new());
pub static ref REGION_LIMITS: RwLock<Vec<(Address, usize)>> =
RwLock::new(Vec::new());
} }
impl fmt::Debug for EMM_ROOTS { impl fmt::Debug for EMM_ROOTS {
...@@ -96,7 +99,10 @@ impl RegionRootSet { ...@@ -96,7 +99,10 @@ impl RegionRootSet {
} }
} }
use ast::op::BinOp::Add;
use memsec::memset;
use std::fmt; use std::fmt;
impl fmt::Debug for RegionRootSet { impl fmt::Debug for RegionRootSet {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let lock = self.refs.read().unwrap(); let lock = self.refs.read().unwrap();
...@@ -230,6 +236,21 @@ impl EMMRegion { ...@@ -230,6 +236,21 @@ impl EMMRegion {
emm_region emm_region
} }
/// Resets this EMM region, logically freeing every object in it.
///
/// Only the bump pointer is rewound to the start of the backing store;
/// the memory itself is NOT wiped here — `ralloc` zeroes each slot
/// (via `memzero`) when it is handed out again.
pub fn collect_all(&mut self) {
    self.freeptr = Address::from_mut_ptr(self.emmbackstore.sysbackstore);
}
pub fn get_free_space(&mut self) -> usize {
return self.size - (self.freeptr - self.emmbackstore.get_base_addr());
}
/// Returns the number of bytes already allocated in this region, i.e.
/// how far the bump pointer has advanced past the backing store's base.
pub fn get_allocated_space(&mut self) -> usize {
    self.freeptr - self.emmbackstore.get_base_addr()
}
/// Deletes the backstore memory allocated as part of `new(_size)` method. /// Deletes the backstore memory allocated as part of `new(_size)` method.
/// ///
/// Note that the `EMMRegion` object itself should be deleted separately. /// Note that the `EMMRegion` object itself should be deleted separately.
...@@ -261,6 +282,7 @@ impl EMMRegion { ...@@ -261,6 +282,7 @@ impl EMMRegion {
// "RALLOC: allocating {} bytes", // "RALLOC: allocating {} bytes",
// _size // _size
// ); // );
unsafe { memzero(self.freeptr.to_ptr_mut::<u8>(), _size) };
self.freeptr += _size as ByteOffset; self.freeptr += _size as ByteOffset;
res res
} else { } else {
...@@ -290,22 +312,114 @@ impl EMMRegion { ...@@ -290,22 +312,114 @@ impl EMMRegion {
} }
} }
/// Records a region's base address and byte size in the global
/// `REGION_LIMITS` table so `find_obj_region` can later map an object
/// address back to the region that contains it.
fn add_region_limits(addr: Address, size: usize) {
    REGION_LIMITS.write().unwrap().push((addr, size));
}
/// Removes the `REGION_LIMITS` entry whose base address equals `addr`.
///
/// Only the first matching entry is removed (base addresses are expected
/// to be unique). No-op if the address was never registered.
fn remove_region_limits(addr: Address) {
    let mut wl = REGION_LIMITS.write().unwrap();
    if let Some(idx) = wl.iter().position(|entry| entry.0 == addr) {
        wl.remove(idx);
    }
}
/// Returns the base address of the registered region that contains
/// `obj_addr`, or the null address (0) if no region covers it.
///
/// Linear scan of `REGION_LIMITS`; the number of live regions is
/// expected to be small.
fn find_obj_region(obj_addr: Address) -> Address {
    let rl = REGION_LIMITS.read().unwrap();
    for i in 0..rl.len() {
        let (base, size) = rl[i];
        // Half-open interval: [base, base + size)
        if base <= obj_addr && obj_addr < base + size {
            info_if!(
                log_settings::MM_RTMU_INFO,
                "find_obj_region.returning({})",
                base
            );
            return base;
        }
    }
    // Not found: report the null address.
    let res = unsafe { Address::from_usize(0 as usize) };
    info_if!(
        log_settings::MM_RTMU_INFO,
        "find_obj_region.returning({})",
        res
    );
    res
}
#[no_mangle] #[no_mangle]
pub extern "C" fn muentry_new_reg(size: usize) -> Address { pub extern "C" fn muentry_new_reg(size: usize) -> Address {
info_if!(log_settings::MM_RTMU_INFO, "=== NEW REGION ==="); info_if!(log_settings::MM_RTMU_INFO, "=== NEW REGION ===");
let new_region = Box::new(EMMRegion::new(size)); let new_region = Box::new(EMMRegion::new(size));
info_if!(log_settings::MM_RTMU_INFO, "- {:?}", new_region); info_if!(log_settings::MM_RTMU_INFO, "- {:?}", new_region);
Address::from_ptr(Box::into_raw(new_region)) let res = Address::from_ptr(Box::into_raw(new_region));
add_region_limits(res, size);
res
} }
#[no_mangle] #[no_mangle]
pub extern "C" fn muentry_delete_reg(regionref: Address) { pub extern "C" fn muentry_delete_reg(regionref: Address) {
remove_region_limits(regionref);
info_if!(log_settings::MM_RTMU_INFO, "=== DELETE REGION ==="); info_if!(log_settings::MM_RTMU_INFO, "=== DELETE REGION ===");
let mut regionbox = let mut regionbox =
unsafe { Box::from_raw(regionref.to_ptr_mut() as *mut EMMRegion) }; unsafe { Box::from_raw(regionref.to_ptr_mut() as *mut EMMRegion) };
regionbox.delete_backstore(); regionbox.delete_backstore();
} }
/// Runtime entry point: resets (collects) the EMM region behind `regionref`.
///
/// `regionref` is a raw pointer to a leaked `Box<EMMRegion>` (produced by
/// `muentry_new_reg`). We briefly re-box it to call a method, then leak it
/// again via `into_raw` so the region is NOT dropped here.
#[no_mangle]
pub extern "C" fn muentry_collect_reg(regionref: Address) {
    info_if!(log_settings::MM_RTMU_INFO, "=== COLLECT REGION ===");
    let mut regionbox =
        unsafe { Box::from_raw(regionref.to_ptr_mut() as *mut EMMRegion) };
    regionbox.collect_all();
    // Intentionally leak the box again: ownership stays with the caller.
    let _ = Box::into_raw(regionbox);
}
/// Runtime entry point: returns the base address of the region containing
/// `obj_ref`, or the null address if the object is not in any registered
/// region. Thin wrapper around `find_obj_region`.
#[no_mangle]
pub extern "C" fn muentry_find_reg(obj_ref: Address) -> Address {
    info_if!(
        log_settings::MM_RTMU_INFO,
        "=== FIND REGION ({:#?})===",
        obj_ref
    );
    find_obj_region(obj_ref)
}
/// Runtime entry point: returns the number of free bytes in the EMM region
/// behind `regionref`.
///
/// Re-boxes the leaked region pointer to call a method, then leaks it again
/// so the region is NOT dropped here.
#[no_mangle]
pub extern "C" fn muentry_get_free_reg(regionref: Address) -> usize {
    info_if!(log_settings::MM_RTMU_INFO, "=== GET FREE REGION ===");
    let mut regionbox =
        unsafe { Box::from_raw(regionref.to_ptr_mut() as *mut EMMRegion) };
    let res = regionbox.get_free_space();
    // Intentionally leak the box again: ownership stays with the caller.
    let _ = Box::into_raw(regionbox);
    res
}
/// Runtime entry point: returns the number of allocated bytes in the EMM
/// region behind `regionref`.
///
/// Re-boxes the leaked region pointer to call a method, then leaks it again
/// so the region is NOT dropped here.
#[no_mangle]
pub extern "C" fn muentry_get_allocated_reg(regionref: Address) -> usize {
    info_if!(log_settings::MM_RTMU_INFO, "=== GET ALLOCATED REGION ===");
    let mut regionbox =
        unsafe { Box::from_raw(regionref.to_ptr_mut() as *mut EMMRegion) };
    let res = regionbox.get_allocated_space();
    // Intentionally leak the box again: ownership stays with the caller.
    let _ = Box::into_raw(regionbox);
    res
}
/// Given the Address of a region struct `EMMRegion`, /// Given the Address of a region struct `EMMRegion`,
/// allocates a space of `size` length /// allocates a space of `size` length
/// (may allocate more to preserve alignment) /// (may allocate more to preserve alignment)
......
This diff is collapsed.
...@@ -555,6 +555,10 @@ pub const CMU_CI_UVM_IRBUILDER_EMM_UNBINDOBJECT: CMuCommInst = 0x403; ...@@ -555,6 +555,10 @@ pub const CMU_CI_UVM_IRBUILDER_EMM_UNBINDOBJECT: CMuCommInst = 0x403;
pub const CMU_CI_UVM_IRBUILDER_REGIONS_NEWREGIONPA: CMuCommInst = 0x408; pub const CMU_CI_UVM_IRBUILDER_REGIONS_NEWREGIONPA: CMuCommInst = 0x408;
pub const CMU_CI_UVM_IRBUILDER_REGIONS_BINDREGION: CMuCommInst = 0x409; pub const CMU_CI_UVM_IRBUILDER_REGIONS_BINDREGION: CMuCommInst = 0x409;
pub const CMU_CI_UVM_IRBUILDER_REGIONS_UNBINDREGION: CMuCommInst = 0x40a; pub const CMU_CI_UVM_IRBUILDER_REGIONS_UNBINDREGION: CMuCommInst = 0x40a;
pub const CMU_CI_UVM_IRBUILDER_REGIONS_COLLECTREGION: CMuCommInst = 0x40b;
pub const CMU_CI_UVM_IRBUILDER_REGIONS_GETFREE: CMuCommInst = 0x40c;
pub const CMU_CI_UVM_IRBUILDER_REGIONS_GETALLOCATED: CMuCommInst = 0x40d;
pub const CMU_CI_UVM_IRBUILDER_REGIONS_FIND: CMuCommInst = 0x40e;
pub const CMU_CI_UVM_IRBUILDER_SCHED_FIFO: CMuCommInst = 0x410; pub const CMU_CI_UVM_IRBUILDER_SCHED_FIFO: CMuCommInst = 0x410;
pub const CMU_CI_UVM_IRBUILDER_SCHED_RM: CMuCommInst = 0x411; pub const CMU_CI_UVM_IRBUILDER_SCHED_RM: CMuCommInst = 0x411;
pub const CMU_CI_UVM_IRBUILDER_SCHED_EDF: CMuCommInst = 0x412; pub const CMU_CI_UVM_IRBUILDER_SCHED_EDF: CMuCommInst = 0x412;
......
...@@ -1577,9 +1577,21 @@ impl MuIRBuilder { ...@@ -1577,9 +1577,21 @@ impl MuIRBuilder {
self.add_inst(id, NodeInst::NodeYield { id, exc_clause }); self.add_inst(id, NodeInst::NodeYield { id, exc_clause });
} }
pub fn new_newfutex(&mut self, id: MuID, init_val: MuVarNode, result_id: MuID) { pub fn new_newfutex(
&mut self,
id: MuID,
init_val: MuVarNode,
result_id: MuID
) {
trace!("new_newfutex"); trace!("new_newfutex");
self.add_inst(id, NodeInst::NodeNewFutex { id, init_val, result_id }); self.add_inst(
id,
NodeInst::NodeNewFutex {
id,
init_val,
result_id
}
);
} }
pub fn new_newattr(&mut self, id: MuID, result_id: MuID) { pub fn new_newattr(&mut self, id: MuID, result_id: MuID) {
...@@ -2664,7 +2676,7 @@ impl<'lb, 'lvm> BundleLoader<'lb, 'lvm> { ...@@ -2664,7 +2676,7 @@ impl<'lb, 'lvm> BundleLoader<'lb, 'lvm> {
NodeType::TypeTimerRef { id: _ } => MuType_::TimerRef, NodeType::TypeTimerRef { id: _ } => MuType_::TimerRef,
#[cfg(feature = "realtime")] #[cfg(feature = "realtime")]
NodeType::TypeFutexRef { id: _ } => MuType_::FutexRef, NodeType::TypeFutexRef { id: _ } => MuType_::FutexRef,
NodeType::TypeCondVarRef {id: _ } => MuType_::CondVarRef, NodeType::TypeCondVarRef { id: _ } => MuType_::CondVarRef,
ref t => panic!("{:?} not implemented", t) ref t => panic!("{:?} not implemented", t)
}; };
...@@ -4677,7 +4689,11 @@ impl<'lb, 'lvm> BundleLoader<'lb, 'lvm> { ...@@ -4677,7 +4689,11 @@ impl<'lb, 'lvm> BundleLoader<'lb, 'lvm> {
unimplemented!(); unimplemented!();
} }
NodeInst::NodeNewFutex { id, init_val, result_id } => { NodeInst::NodeNewFutex {
id,
init_val,
result_id
} => {
let iv = self.get_treenode(fcb, init_val); let iv = self.get_treenode(fcb, init_val);
let futexref_t = self.ensure_futexref(); let futexref_t = self.ensure_futexref();
let rv = self.new_ssa(fcb, result_id, futexref_t).clone_value(); let rv = self.new_ssa(fcb, result_id, futexref_t).clone_value();
...@@ -4961,7 +4977,12 @@ impl<'lb, 'lvm> BundleLoader<'lb, 'lvm> { ...@@ -4961,7 +4977,12 @@ impl<'lb, 'lvm> BundleLoader<'lb, 'lvm> {
// } // }
let my_index = ops.len(); let my_index = ops.len();
let op = self.add_opnd(fcb, ops, *vid); let op = self.add_opnd(fcb, ops, *vid);
assert_ir!(op.ty() == arg.ty, "op.ty() is -{:#?}- but arg.ty is -{:#?}", op.ty(), arg.ty); assert_ir!(
op.ty() == arg.ty,
"op.ty() is -{:#?}- but arg.ty is -{:#?}",
op.ty(),
arg.ty
);
DestArg::Normal(my_index) DestArg::Normal(my_index)
}) })
.collect::<Vec<_>>(); .collect::<Vec<_>>();
...@@ -5042,6 +5063,15 @@ impl<'lb, 'lvm> BundleLoader<'lb, 'lvm> { ...@@ -5042,6 +5063,15 @@ impl<'lb, 'lvm> BundleLoader<'lb, 'lvm> {
is_ccall: bool, is_ccall: bool,
call_conv: CallConvention call_conv: CallConvention
) -> CallData { ) -> CallData {
use log_settings::IRBLDR_TRACE;
trace_if!(
IRBLDR_TRACE,
"build_call_data(fid: {}, \n\t args_id: \n\t\t{:#?})",
callee,
args
);
let func_index = ops.len(); let func_index = ops.len();
let callee = self.add_opnd(fcb, ops, callee); let callee = self.add_opnd(fcb, ops, callee);