Commit e1844c8a authored by Isaac Oscar Gariano's avatar Isaac Oscar Gariano

Implemented NEWTHREAD on aarch64

parent ae1e02a5
......@@ -23,7 +23,7 @@ extern crate gcc;
fn main() {
gcc::compile_library("libruntime_c.a", &["src/runtime/runtime_c_x64_sysv.c"]);
gcc::Config::new()
gcc::Build::new()
.flag("-O3")
.flag("-c")
.file("src/runtime/runtime_asm_x64_sysv.S")
......@@ -38,7 +38,7 @@ fn main() {
fn main() {
gcc::compile_library("libruntime_c.a", &["src/runtime/runtime_c_aarch64_sysv.c"]);
gcc::Config::new()
gcc::Build::new()
.flag("-O3")
.flag("-c")
.file("src/runtime/runtime_asm_aarch64_sysv.S")
......@@ -59,19 +59,19 @@ fn main() {
use std::path::Path;
let mut compiler_name = String::new();
compiler_name.push_str("x86_64-rumprun-netbsd-gcc");
gcc::Config::new()
gcc::Build::new()
.flag("-O3")
.flag("-c")
.compiler(Path::new(compiler_name.as_str()))
.file("src/runtime/runtime_x64_sel4_rumprun_sysv.c")
.compile("libruntime_c.a");
gcc::Config::new()
gcc::Build::new()
.flag("-O3")
.flag("-c")
.compiler(Path::new(compiler_name.as_str()))
.file("src/runtime/runtime_asm_x64_sel4_rumprun_sysv.S")
.compile("libruntime_asm.a");
gcc::Config::new()
gcc::Build::new()
.flag("-O3")
.flag("-c")
.compiler(Path::new(compiler_name.as_str()))
......
......@@ -92,8 +92,7 @@ impl Instruction {
NewHybrid(_, _) |
AllocAHybrid(_, _) |
NewStack(_) |
NewThread(_, _) |
NewThreadExn(_, _) |
NewThread{ .. } |
NewFrameCursor(_) |
GetIRef(_) |
GetFieldIRef { .. } |
......@@ -150,8 +149,7 @@ impl Instruction {
NewHybrid(_, _) |
AllocAHybrid(_, _) |
NewStack(_) |
NewThread(_, _) |
NewThreadExn(_, _) |
NewThread{ .. } |
NewFrameCursor(_) |
Fence(_) |
Return(_) |
......@@ -228,8 +226,7 @@ impl Instruction {
NewHybrid(_, _) |
AllocAHybrid(_, _) |
NewStack(_) |
NewThread(_, _) |
NewThreadExn(_, _) |
NewThread{ .. } |
NewFrameCursor(_) |
GetIRef(_) |
GetFieldIRef { .. } |
......@@ -301,8 +298,7 @@ impl Instruction {
NewHybrid(_, _) |
AllocAHybrid(_, _) |
NewStack(_) |
NewThread(_, _) |
NewThreadExn(_, _) |
NewThread{ .. } |
NewFrameCursor(_) |
GetIRef(_) |
GetFieldIRef { .. } |
......@@ -459,11 +455,12 @@ pub enum Instruction_ {
/// create a new Mu thread, yields thread reference
/// args: stackref of a Mu stack, a list of arguments
NewThread(OpIndex, Vec<OpIndex>), // stack, args
/// create a new Mu thread, yields thread reference (thread resumes with exceptional value)
/// args: stackref of a Mu stack, an exceptional value
NewThreadExn(OpIndex, OpIndex), // stack, exception
NewThread {
stack: OpIndex,
thread_local: Option<OpIndex>,
is_exception: bool,
args: Vec<OpIndex>
},
/// create a frame cursor reference
/// args: stackref of a Mu stack
......@@ -724,16 +721,16 @@ impl Instruction_ {
&Instruction_::NewHybrid(ref ty, len) => format!("NEWHYBRID {} {}", ty, ops[len]),
&Instruction_::AllocAHybrid(ref ty, len) => format!("ALLOCAHYBRID {} {}", ty, ops[len]),
&Instruction_::NewStack(func) => format!("NEW_STACK {}", ops[func]),
&Instruction_::NewThread(stack, ref args) => {
&Instruction_::NewThread{stack, thread_local, is_exception, ref args} => {
let thread_local = thread_local.map(|t| format!("{}", ops[t])).unwrap_or("NULL".to_string());
format!(
"NEWTHREAD {} PASS_VALUES {}",
"SWAPSTACK {} THREADLOCAL({}) {} {}",
ops[stack],
op_vector_str(args, ops)
thread_local,
is_exception,
op_vector_str(args, ops),
)
}
&Instruction_::NewThreadExn(stack, exn) => {
format!("NEWTHREAD {} THROW_EXC {}", ops[stack], ops[exn])
}
&Instruction_::NewFrameCursor(stack) => format!("NEWFRAMECURSOR {}", ops[stack]),
&Instruction_::GetIRef(reference) => format!("GETIREF {}", ops[reference]),
&Instruction_::GetFieldIRef {
......
......@@ -53,6 +53,7 @@ use std::collections::HashMap;
// Number of normal callee saved registers (excluding FP and LR, and SP)
pub const CALLEE_SAVED_COUNT: usize = 18;
pub const ARGUMENT_REG_COUNT: usize = 16;
macro_rules! REGISTER {
($id:expr, $name: expr, $ty: ident) => {
......@@ -885,18 +886,18 @@ pub fn get_callee_saved_offset(reg: MuID) -> isize {
(id as isize + 1) * (-8)
}
// Returns the callee saved register with the id...
/*pub fn get_callee_saved_register(offset: isize) -> P<Value> {
debug_assert!(offset <= -8 && (-offset) % 8 == 0);
let id = ((offset/-8) - 1) as usize;
if id < CALLEE_SAVED_GPRs.len() {
CALLEE_SAVED_GPRs[id].clone()
} else if id - CALLEE_SAVED_GPRs.len() < CALLEE_SAVED_FPRs.len() {
CALLEE_SAVED_FPRs[id - CALLEE_SAVED_GPRs.len()].clone()
// Gets the offset of the argument register when passed on the stack
pub fn get_argument_reg_offset(reg: MuID) -> isize {
let reg = get_color_for_precolored(reg);
let id = if reg >= FPR_ID_START {
(reg - ARGUMENT_FPRS[0].id()) / 2
} else {
panic!("There is no callee saved register with id {}", offset)
}
}*/
(reg - ARGUMENT_GPRS[0].id()) / 2 + ARGUMENT_FPRS.len()
};
(id as isize + 1) * (-8)
}
pub fn is_callee_saved(reg_id: MuID) -> bool {
for reg in CALLEE_SAVED_GPRS.iter() {
......@@ -959,7 +960,7 @@ pub fn estimate_insts_for_ir(inst: &Instruction) -> usize {
// runtime
New(_) | NewHybrid(_, _) => 10,
NewStack(_) | NewThread(_, _) | NewThreadExn(_, _) | NewFrameCursor(_) => 10,
NewStack(_) | NewThread { .. } | NewFrameCursor(_) => 10,
ThreadExit => 10,
CurrentStack => 10,
KillStack(_) => 10,
......@@ -1526,6 +1527,14 @@ pub fn make_value_int_const(val: u64, vm: &VM) -> P<Value> {
})
}
/// Builds an unnamed constant `Value` holding a NULL `ref<void>`.
/// A fresh entity id is drawn from the VM for the value's header.
pub fn make_value_nullref(vm: &VM) -> P<Value> {
    let entity_hdr = MuEntityHeader::unnamed(vm.next_id());
    let null_const = Value_::Constant(Constant::NullRef);
    P(Value {
        hdr: entity_hdr,
        ty: REF_VOID_TYPE.clone(),
        v: null_const
    })
}
// Replaces the zero register with a temporary whose value is zero (or returns the original register)
/* TODO use this function for the following arguments:
......
......@@ -657,7 +657,7 @@ pub fn estimate_insts_for_ir(inst: &Instruction) -> usize {
// runtime call
New(_) | NewHybrid(_, _) => 10,
NewStack(_) | NewThread(_, _) | NewThreadExn(_, _) | NewFrameCursor(_) => 10,
NewStack(_) | NewThread { .. } | NewFrameCursor(_) => 10,
ThreadExit => 10,
CurrentStack => 10,
KillStack(_) => 10,
......
......@@ -466,13 +466,6 @@ fn copy_inline_blocks(
let ref ops = inst.ops;
let ref v = inst.v;
trace!(
"ISAAC: Inlining [{} -> {}] : {} -> {}",
old_block.name(),
block_name,
inst_name,
hdr.name()
);
match v {
&Instruction_::Return(ref vec) => {
// change RET to a branch
......
......@@ -56,7 +56,7 @@ fn main() {
use std::path::Path;
let mut compiler_name = String::new();
compiler_name.push_str("x86_64-rumprun-netbsd-gcc");
gcc::Config::new().flag("-O3").flag("-c")
gcc::Build::new().flag("-O3").flag("-c")
.compiler(Path::new(compiler_name.as_str()))
.file("src/heap/gc/clib_x64_sel4_rumprun.c")
.compile("libgc_clib_x64.a");
......
......@@ -90,6 +90,30 @@ lazy_static! {
jit: RwLock::new(None),
};
// impl/decl: thread.rs
pub static ref NEW_THREAD_NORMAL: RuntimeEntrypoint = RuntimeEntrypoint {
sig: P(MuFuncSig{
hdr: MuEntityHeader::unnamed(ir::new_internal_id()),
arg_tys: vec![STACKREF_TYPE.clone(), REF_VOID_TYPE.clone()],
ret_tys: vec![THREADREF_TYPE.clone()],
}),
aot: ValueLocation::Relocatable(RegGroup::GPR, String::from("muentry_new_thread_normal")),
jit: RwLock::new(None),
};
// impl/decl: thread.rs
pub static ref NEW_THREAD_EXCEPTIONAL: RuntimeEntrypoint = RuntimeEntrypoint {
sig: P(MuFuncSig{
hdr: MuEntityHeader::unnamed(ir::new_internal_id()),
arg_tys: vec![STACKREF_TYPE.clone(), REF_VOID_TYPE.clone(), REF_VOID_TYPE.clone()],
ret_tys: vec![THREADREF_TYPE.clone()],
}),
aot: ValueLocation::Relocatable(RegGroup::GPR, String::from("muentry_new_thread_exceptional")),
jit: RwLock::new(None),
};
// impl/decl: gc/lib.rs
pub static ref ALLOC_FAST : RuntimeEntrypoint = RuntimeEntrypoint {
sig: P(MuFuncSig {
......
......@@ -299,14 +299,22 @@ pub extern "C" fn mu_main(
};
// FIXME: currently assumes no user defined thread local - See Issue #48
let thread = thread::MuThread::new_thread_normal(
thread::MuThread::new_thread_normal(
stack,
unsafe { Address::zero() },
args,
vm.clone()
);
thread.join().unwrap();
loop {
let thread = vm.pop_join_handle();
if thread.is_none() {
break;
}
thread.unwrap().join().unwrap();
}
trace!("All threads have exited, quiting...");
}
}
......
......@@ -60,10 +60,10 @@ begin_func muthread_start_normal
MOV SP, X0
// Pop the argument registers from the new stack
LDP D1, D0 , [SP, #14*8]
LDP D3, D2, [SP, #12*8]
LDP D5, D4, [SP, #10*8]
LDP D7, D6, [SP, #8*8]
LDP D1, D0, [SP, #14*8 ]
LDP D3, D2, [SP, #12*8 ]
LDP D5, D4, [SP, #10*8 ]
LDP D7, D6, [SP, #8*8 ]
LDP X1, X0, [SP, #6*8]
LDP X3, X2, [SP, #4*8]
LDP X5, X4, [SP, #2*8]
......
......@@ -316,6 +316,8 @@ pub struct MuThread {
/// a pointer to the virtual machine
pub vm: Arc<VM>
}
unsafe impl Sync for MuThread {}
unsafe impl Send for MuThread {}
// a few field offsets the compiler uses
lazy_static! {
......@@ -383,6 +385,7 @@ extern "C" {
/// new_sp: stack pointer for the mu stack
/// old_sp_loc: the location to store native stack pointer so we can later swap back
fn muthread_start_normal(new_sp: Address, old_sp_loc: Address);
fn muthread_start_exceptional(exception: Address, new_sp: Address, old_sp_loc: Address);
/// gets base pointer for current frame
pub fn get_current_frame_bp() -> Address;
......@@ -451,30 +454,34 @@ impl MuThread {
threadlocal: Address,
vals: Vec<ValueLocation>,
vm: Arc<VM>
) -> JoinHandle<()> {
) {
// set up arguments on stack
stack.setup_args(vals);
MuThread::mu_thread_launch(vm.next_id(), stack, threadlocal, vm)
let (join_handle, _) = MuThread::mu_thread_launch(vm.next_id(), stack, threadlocal, None, vm.clone());
vm.push_join_handle(join_handle);
}
/// creates and launches a mu thread, returns a JoinHandle
#[no_mangle]
pub extern "C" fn mu_thread_launch(
/// creates and launches a mu thread, returns a JoinHandle and address to its MuThread structure
fn mu_thread_launch(
id: MuID,
stack: Box<MuStack>,
user_tls: Address,
exception: Option<Address>,
vm: Arc<VM>
) -> JoinHandle<()> {
) -> (JoinHandle<()>, *mut MuThread) {
let new_sp = stack.sp;
match thread::Builder::new()
// The conversions between boxes and ptrs are needed here as a '*mut MuThread' can't be sent between threads
// but a Box can be. Also converting a Box to a ptr consumes it.
let muthread_ptr = Box::into_raw(Box::new(MuThread::new(id, mm::new_mutator(), stack, user_tls, vm)));
let muthread = unsafe {Box::from_raw(muthread_ptr)};
(match thread::Builder::new()
.name(format!("Mu Thread #{}", id))
.spawn(move || {
let mut muthread = MuThread::new(id, mm::new_mutator(), stack, user_tls, vm);
let muthread = Box::into_raw(muthread);
// set thread local
unsafe { set_thread_local(&mut muthread) };
unsafe { set_thread_local(muthread) };
let addr = unsafe { muentry_get_thread_local() };
let sp_threadlocal_loc = addr + *NATIVE_SP_LOC_OFFSET;
......@@ -482,14 +489,18 @@ impl MuThread {
debug!("sp_store: 0x{:x}", sp_threadlocal_loc);
unsafe {
muthread_start_normal(new_sp, sp_threadlocal_loc);
}
match exception {
Some(e) => muthread_start_exceptional(e, new_sp, sp_threadlocal_loc),
None => muthread_start_normal(new_sp, sp_threadlocal_loc)
}
debug!("returned to Rust stack. Going to quit");
// Thread finished, delete its data
Box::from_raw(muthread);
}
}) {
Ok(handle) => handle,
Err(_) => panic!("failed to create a thread")
}
}, muthread_ptr)
}
/// creates metadata for a Mu thread
......@@ -644,3 +655,21 @@ pub unsafe extern "C" fn muentry_kill_stack(stack: *mut MuStack) {
// This new box will be destroyed upon returning
Box::from_raw(stack);
}
// Runtime entrypoint: creates and launches a new Mu thread that resumes its
// stack by raising `exception` (the NEWTHREAD ... THROW_EXC case).
//
// Safety: `stack` must be a pointer previously produced from a `Box<MuStack>`
// (ownership is taken back via `Box::from_raw`), and this must be called from
// a Mu thread (it reads `MuThread::current_mut()` to obtain the VM).
// Returns a raw pointer to the new thread's MuThread structure; the spawned
// thread's JoinHandle is queued on the VM for a later join.
#[no_mangle]
pub unsafe extern "C" fn muentry_new_thread_exceptional(stack: *mut MuStack, thread_local: Address, exception: Address) -> *mut MuThread {
let vm = MuThread::current_mut().vm.clone();
// Some(exception) selects the exceptional resumption path in mu_thread_launch
let (join_handle, muthread) = MuThread::mu_thread_launch(vm.next_id(), Box::from_raw(stack), thread_local, Some(exception), vm.clone());
vm.push_join_handle(join_handle);
muthread
}
// Runtime entrypoint: creates and launches a new Mu thread that resumes its
// stack normally with the values already set up on it (the NEWTHREAD ...
// PASS_VALUES case).
//
// Safety: `stack` must be a pointer previously produced from a `Box<MuStack>`
// (ownership is taken back via `Box::from_raw`), and this must be called from
// a Mu thread (it reads `MuThread::current_mut()` to obtain the VM).
// Returns a raw pointer to the new thread's MuThread structure; the spawned
// thread's JoinHandle is queued on the VM for a later join.
#[no_mangle]
pub unsafe extern "C" fn muentry_new_thread_normal(stack: *mut MuStack, thread_local: Address) -> *mut MuThread {
let vm = MuThread::current_mut().vm.clone();
// None selects the normal (non-exceptional) resumption path in mu_thread_launch
let (join_handle, muthread) = MuThread::mu_thread_launch(vm.next_id(), Box::from_raw(stack), thread_local, None, vm.clone());
vm.push_join_handle(join_handle);
muthread
}
\ No newline at end of file
......@@ -1269,6 +1269,7 @@ struct BundleLoader<'lb, 'lvm> {
built_ref_void: Option<P<MuType>>,
built_tagref64: Option<P<MuType>>,
built_stackref: Option<P<MuType>>,
built_threadref: Option<P<MuType>>,
built_funcref_of: IdPMap<MuType>,
built_ref_of: IdPMap<MuType>,
......@@ -1307,6 +1308,7 @@ fn load_bundle(b: &mut MuIRBuilder) {
built_ref_void: Default::default(),
built_tagref64: Default::default(),
built_stackref: Default::default(),
built_threadref: Default::default(),
built_funcref_of: Default::default(),
built_ref_of: Default::default(),
built_iref_of: Default::default(),
......@@ -1444,6 +1446,25 @@ impl<'lb, 'lvm> BundleLoader<'lb, 'lvm> {
impl_ty
}
// Returns the implementation-level `threadref` type, creating it on first
// use and caching it in `built_threadref` / `built_types` thereafter.
fn ensure_threadref(&mut self) -> P<MuType> {
    // Fast path: the type was already built earlier in this bundle load.
    if let Some(ref cached) = self.built_threadref {
        return cached.clone();
    }

    // Slow path: allocate a fresh unnamed ThreadRef type.
    let ty_id = self.vm.next_id();
    let threadref_ty = P(MuType {
        hdr: MuEntityHeader::unnamed(ty_id),
        v: MuType_::ThreadRef
    });
    trace!("Ensure threadref is defined: {} {:?}", ty_id, threadref_ty);

    // Register the new type so later lookups (and this cache) find it.
    self.built_types.insert(ty_id, threadref_ty.clone());
    self.built_threadref = Some(threadref_ty.clone());
    threadref_ty
}
fn ensure_i6(&mut self) -> P<MuType> {
if let Some(ref impl_ty) = self.built_i6 {
......@@ -2368,13 +2389,14 @@ impl<'lb, 'lvm> BundleLoader<'lb, 'lvm> {
.iter()
.map(|iid| self.build_inst(fcb, *iid, blocks))
.collect::<Vec<_>>();
assert_ir!(
res.last()
.as_ref()
.unwrap()
.as_inst_ref()
.is_terminal_inst()
);
let n = res.len();
for i in 0..(n - 1) {
// None of the internal instruction should be a terminator
assert_ir!(!res[i].as_inst_ref().is_terminal_inst());
}
// The last instruction should be a terminator
assert_ir!(res[n - 1].as_inst_ref().is_terminal_inst());
res
}
......@@ -3261,7 +3283,7 @@ impl<'lb, 'lvm> BundleLoader<'lb, 'lvm> {
let impl_ord = self.build_mem_ord(ord);
let impl_loc = self.get_treenode(fcb, loc);
let impl_rvtype = self.get_built_type(refty);
let impl_rv = self.new_ssa(fcb, result_id, impl_rvtype).clone_value();
let impl_rv = self.new_ssa(fcb, result_id, self.vm.make_strong_type(impl_rvtype)).clone_value();
let impl_refty = self.get_built_type(refty);
assert_ir!(impl_ord != MemoryOrder::Release && impl_ord != MemoryOrder::AcqRel);
......@@ -3382,10 +3404,6 @@ impl<'lb, 'lvm> BundleLoader<'lb, 'lvm> {
exc_clause, //Option<MuExcClause>,
keepalive_clause // Option<MuKeepaliveClause>,
} => {
// TODO: Validate IR
/*
validate cur_stack_clause and new_stack_clause
*/
let mut ops: Vec<P<TreeNode>> = vec![self.get_treenode(fcb, swappee)];
assert_ir!(ops[0].ty().is_stackref());
......@@ -3407,30 +3425,7 @@ impl<'lb, 'lvm> BundleLoader<'lb, 'lvm> {
})
.collect::<Vec<_>>();
let (is_exception, args) = match **new_stack_clause {
NodeNewStackClause::PassValues {
ref tys, ref vars, ..
} => {
let args_begin_index = ops.len();
let args = self.add_opnds(fcb, &mut ops, vars);
assert_ir!(
args.len() == tys.len() &&
args.iter()
.zip(tys)
.all(|(arg, tid)| arg.ty() == self.get_built_type(*tid))
);
(
false,
(args_begin_index..(vars.len() + 1)).collect::<Vec<_>>()
)
}
NodeNewStackClause::ThrowExc { ref exc, .. } => {
let exc_arg = ops.len();
self.add_opnd(fcb, &mut ops, *exc);
(true, vec![exc_arg])
}
};
let (is_exception, args) = self.build_new_stack_clause(new_stack_clause, fcb, &mut ops);
match exc_clause {
Some(ecid) => {
......@@ -3442,9 +3437,8 @@ impl<'lb, 'lvm> BundleLoader<'lb, 'lvm> {
self.build_destination(fcb, ecnode.exc, &mut ops, &[], blocks);
assert_ir!(match **cur_stack_clause {
// Can't have an exception
// Can't have an exception clause
NodeCurrentStackClause::KillOld { .. } => false,
// clause
_ => true
});
......@@ -3489,6 +3483,52 @@ impl<'lb, 'lvm> BundleLoader<'lb, 'lvm> {
}
}
}
NodeInst::NodeNewThread {
id: _,
result_id,
stack,
threadlocal,
new_stack_clause,
exc_clause
} => {
if exc_clause.is_some() {
unimplemented!();
}
let mut ops: Vec<P<TreeNode>> = vec![self.get_treenode(fcb, stack)];
assert_ir!(ops[0].ty().is_stackref());
let new_stack_clause = self.b.bundle.ns_clauses.get(&new_stack_clause).unwrap();
let impl_threadref = self.ensure_threadref();
let impl_rv = self.new_ssa(fcb, result_id, impl_threadref).clone_value();
let threadlocal = match threadlocal {
Some(tl) => {
let index = ops.len();
let tl = self.add_opnd(fcb, &mut ops, tl);
assert_ir!(tl.ty().is_ref() && tl.ty().get_referent_ty().unwrap().is_void());
Some(index)
}
None => None,
};
let (is_exception, args) = self.build_new_stack_clause(new_stack_clause, fcb, &mut ops);
Instruction {
hdr: hdr,
value: Some(vec![impl_rv]),
ops: ops,
v: Instruction_::NewThread {
stack: 0,
thread_local: threadlocal,
is_exception: is_exception,
args: args
}
}
}
NodeInst::NodeCommInst {
id,
ref result_ids,
......@@ -3585,6 +3625,37 @@ impl<'lb, 'lvm> BundleLoader<'lb, 'lvm> {
res
}
// Builds the operands of a NEW_STACK clause (PASS_VALUES or THROW_EXC),
// appending them to `ops`.
//
// Returns `(is_exception, arg_indices)`:
// - `is_exception` is true for a THROW_EXC clause, false for PASS_VALUES;
// - `arg_indices` are the indices (into `ops`) of the operands appended here.
fn build_new_stack_clause(
    &mut self,
    nsc: &NodeNewStackClause,
    fcb: &mut FuncCtxBuilder,
    ops: &mut Vec<P<TreeNode>>
) -> (bool, Vec<OpIndex>) {
    match nsc {
        &NodeNewStackClause::PassValues {
            ref tys, ref vars, ..
        } => {
            let args_begin_index = ops.len();
            let args = self.add_opnds(fcb, ops, vars);
            // Each passed value must match its declared type.
            assert_ir!(
                args.len() == tys.len() &&
                    args.iter()
                        .zip(tys)
                        .all(|(arg, tid)| arg.ty() == self.get_built_type(*tid))
            );
            // The operands we just appended occupy indices
            // [args_begin_index, args_begin_index + vars.len()).
            // NOTE: the previous `args_begin_index..(vars.len() + 1)` was only
            // correct when args_begin_index == 1 (e.g. SWAPSTACK); for
            // NEWTHREAD with a threadlocal operand args begin at index 2 and
            // the old range produced wrong/short indices.
            let arg_indices =
                (args_begin_index..(args_begin_index + vars.len())).collect::<Vec<_>>();
            (false, arg_indices)
        }
        &NodeNewStackClause::ThrowExc { ref exc, .. } => {
            // A THROW_EXC clause carries exactly one operand: the exception.
            let exc_arg = ops.len();
            self.add_opnd(fcb, ops, *exc);
            (true, vec![exc_arg])
        }
    }
}
fn build_call_data(
&mut self,
fcb: &mut FuncCtxBuilder,
......
......@@ -38,9 +38,11 @@ use vm::vm_options::MuLogLevel;
use log::LogLevel;
use std::sync::Arc;
use std::sync::RwLock;
use std::sync::Mutex;
use std::sync::RwLockWriteGuard;
use std::sync::atomic::{AtomicUsize, ATOMIC_USIZE_INIT, Ordering};
use std::thread::JoinHandle;
use std::collections::LinkedList;
use std;
use utils::bit_utils::{bits_ones, u64_asr};
......@@ -75,18 +77,20 @@ pub struct VM {
name_id_map: RwLock<HashMap<MuName, MuID>>, // +64
/// types declared to the VM
types: RwLock<HashMap<MuID, P<MuType>>>, // +120
/// Ref types declared by 'make_strong_type', the key is the ID of the referent
ref_types: RwLock<HashMap<MuID, P<MuType>>>,
/// types that are resolved as BackendType
backend_type_info: RwLock<HashMap<MuID, Box<BackendType>>>, // +176
backend_type_info: RwLock<HashMap<MuID, Box<BackendType>>>,
/// constants declared to the VM
constants: RwLock<HashMap<MuID, P<Value>>>, // +232
constants: RwLock<HashMap<MuID, P<Value>>>,
/// globals declared to the VM
globals: RwLock<HashMap<MuID, P<Value>>>, // +288
globals: RwLock<HashMap<MuID, P<Value>>>,
/// function signatures declared
func_sigs: RwLock<HashMap<MuID, P<MuFuncSig>>>, // +400
func_sigs: RwLock<HashMap<MuID, P<MuFuncSig>>>,
/// functions declared to the VM
funcs: RwLock<HashMap<MuID, RwLock<MuFunction>>>, // +456
funcs: RwLock<HashMap<MuID, RwLock<MuFunction>>>,
/// primordial function that is set to make boot image
primordial: RwLock<Option<PrimordialThreadInfo>>, // +568
primordial: RwLock<Option<PrimordialThreadInfo>>,
/// current options for this VM
pub vm_options: VMOptions, // +624
......@@ -118,7 +122,8 @@ pub struct VM {
compiled_callsite_table: RwLock<HashMap<Address, CompiledCallsite>>, // 896
/// Nnmber of callsites in the callsite tables
callsite_count: AtomicUsize
callsite_count: AtomicUsize,
pub pending_joins: Mutex<LinkedList<JoinHandle<()>>> // A list of all threads currently waiting to be joined
}
unsafe impl rodal::Dump for VM {
......@@ -129,6 +134,7 @@ unsafe impl rodal::Dump for VM {
dumper.dump_object(&self.id_name_map);
dumper.dump_object(&self.name_id_map);
dumper.dump_object(&self.types);
dumper.dump_object(&self.ref_types);
dumper.dump_object(&self.backend_type_info);
dumper.dump_object(&self.constants);
dumper.dump_object(&self.globals);
......@@ -161,6 +167,11 @@ unsafe impl rodal::Dump for VM {
rodal::EmptyHashMap::<Address, CompiledCallsite>::new()
));
dumper.dump_object(&self.callsite_count);
dumper.dump_padding(&self.pending_joins);
dumper.dump_object_here(&Mutex::new(
rodal::EmptyLinkedList::<JoinHandle<()>>::new()
));
}
}
......@@ -213,6 +224,7 @@ impl<'a> VM {
name_id_map: RwLock::new(HashMap::new()),
constants: RwLock::new(HashMap::new()),
types: RwLock::new(HashMap::new()),
ref_types: RwLock::new(HashMap::new()),
backend_type_info: RwLock::new(HashMap::new()),
globals: RwLock::new(HashMap::new()),
global_locations: RwLock::new(hashmap!{}),
......@@ -224,7 +236,8 @@ impl<'a> VM {
primordial: RwLock::new(None),
aot_pending_funcref_store: RwLock::new(HashMap::new()),
compiled_callsite_table: RwLock::new(HashMap::new()),
callsite_count: ATOMIC_USIZE_INIT
callsite_count: ATOMIC_USIZE_INIT,
pending_joins: Mutex::new(LinkedList::new()),
};
// insert all internal types
......@@ -789,6 +802,27 @@ impl<'a> VM {
}
}
pub fn make_strong_type(&self, ty: P<MuType>) -> P<MuType> {
match &ty.v {
&MuType_::WeakRef(ref t) => {
let res = self.ref_types
.read()
.unwrap()
.get(&t.id())
.map(|x| x.clone());
match res {
Some(ty) => ty,
None => {