Commit 79589ae4 authored by Isaac Oscar Gariano

Formatting

parent 8b5df80c
......@@ -92,7 +92,7 @@ impl Instruction {
NewHybrid(_, _) |
AllocAHybrid(_, _) |
NewStack(_) |
NewThread{ .. } |
NewThread { .. } |
NewFrameCursor(_) |
GetIRef(_) |
GetFieldIRef { .. } |
......@@ -149,7 +149,7 @@ impl Instruction {
NewHybrid(_, _) |
AllocAHybrid(_, _) |
NewStack(_) |
NewThread{ .. } |
NewThread { .. } |
NewFrameCursor(_) |
Fence(_) |
Return(_) |
......@@ -226,7 +226,7 @@ impl Instruction {
NewHybrid(_, _) |
AllocAHybrid(_, _) |
NewStack(_) |
NewThread{ .. } |
NewThread { .. } |
NewFrameCursor(_) |
GetIRef(_) |
GetFieldIRef { .. } |
......@@ -298,7 +298,7 @@ impl Instruction {
NewHybrid(_, _) |
AllocAHybrid(_, _) |
NewStack(_) |
NewThread{ .. } |
NewThread { .. } |
NewFrameCursor(_) |
GetIRef(_) |
GetFieldIRef { .. } |
......@@ -721,8 +721,15 @@ impl Instruction_ {
&Instruction_::NewHybrid(ref ty, len) => format!("NEWHYBRID {} {}", ty, ops[len]),
&Instruction_::AllocAHybrid(ref ty, len) => format!("ALLOCAHYBRID {} {}", ty, ops[len]),
&Instruction_::NewStack(func) => format!("NEW_STACK {}", ops[func]),
&Instruction_::NewThread{stack, thread_local, is_exception, ref args} => {
let thread_local = thread_local.map(|t| format!("{}", ops[t])).unwrap_or("NULL".to_string());
&Instruction_::NewThread {
stack,
thread_local,
is_exception,
ref args
} => {
let thread_local = thread_local
.map(|t| format!("{}", ops[t]))
.unwrap_or("NULL".to_string());
format!(
"SWAPSTACK {} THREADLOCAL({}) {} {}",
ops[stack],
......
......@@ -2141,7 +2141,12 @@ impl<'a> InstructionSelection {
vm
);
}
Instruction_::NewThread{stack, thread_local, is_exception, ref args} => {
Instruction_::NewThread {
stack,
thread_local,
is_exception,
ref args
} => {
trace!("Instruction Selection on NEWTHREAD");
let ref ops = inst.ops;
let res = self.get_result_value(node, 0);
......@@ -2173,7 +2178,8 @@ impl<'a> InstructionSelection {
vm
);
let arg_values = self.emit_arg_values(&args, ops, f_content, f_context, vm);
let arg_values =
self.emit_arg_values(&args, ops, f_content, f_context, vm);
// Pass the arguments, stack arguments are placed below the new_sp,
// register arguments are placed above it
......@@ -2193,7 +2199,12 @@ impl<'a> InstructionSelection {
vm
);
emit_sub_u64(self.backend.as_mut(), &new_sp, &new_sp, (ARGUMENT_REG_COUNT*WORD_SIZE) as u64);
emit_sub_u64(
self.backend.as_mut(),
&new_sp,
&new_sp,
(ARGUMENT_REG_COUNT * WORD_SIZE) as u64
);
emit_store_base_offset(
self.backend.as_mut(),
&stack,
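The `emit_sub_u64` call above reserves room on the new stack for the register arguments mentioned in the comment. A minimal sketch of that adjustment, assuming `ARGUMENT_REG_COUNT = 8` and `WORD_SIZE = 8` (the real constants live in the backend); it is not the emitter code itself:

```rust
// Assumed values for illustration only; the backend defines the real ones.
const ARGUMENT_REG_COUNT: u64 = 8; // e.g. x0..x7 on AArch64
const WORD_SIZE: u64 = 8;

/// Sketch of the sp adjustment done by emit_sub_u64: the new stack's sp is
/// lowered by one word per argument register, leaving a block of
/// ARGUMENT_REG_COUNT words between the old and new sp values where the
/// register arguments can be spilled.
fn reserve_reg_arg_block(new_sp: u64) -> u64 {
    new_sp - ARGUMENT_REG_COUNT * WORD_SIZE
}
```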
......@@ -3878,7 +3889,10 @@ impl<'a> InstructionSelection {
modify_arg_base: bool,
reg_args: bool, // Whether to pass register arguments
stack_args: bool, // Whether to pass stack arguments
reg_arg_base: Option<&P<Value>>, // If this is none put reg arguments in registers, otherwise store them at an offset from reg_arg_base
// If this is none put reg arguments in registers,
// otherwise store them at an offset from reg_arg_base
reg_arg_base: Option<&P<Value>>,
f_context: &mut FunctionContext,
vm: &VM
) -> (usize, Vec<P<Value>>) {
......@@ -3944,8 +3958,18 @@ impl<'a> InstructionSelection {
let arg_loc_h_id = arg_loc.id() + 2; //get_register_from_id();
let (arg_loc_l, arg_loc_h) = match reg_arg_base {
Some(ref b) => (
make_value_base_offset(b, get_argument_reg_offset(arg_loc.id()) as i64, &arg_loc.ty, vm),
make_value_base_offset(b, get_argument_reg_offset(arg_loc_h_id) as i64, &arg_loc.ty, vm)
make_value_base_offset(
b,
get_argument_reg_offset(arg_loc.id()) as i64,
&arg_loc.ty,
vm
),
make_value_base_offset(
b,
get_argument_reg_offset(arg_loc_h_id) as i64,
&arg_loc.ty,
vm
)
),
None => (arg_loc.clone(), get_register_from_id(arg_loc_h_id))
};
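The `reg_arg_base` parameter drives the match above: with `None` the argument stays in its register, with `Some(base)` it is materialised as a base + offset memory operand. A simplified sketch of that dispatch, with `ArgLoc` and `argument_reg_offset` standing in for the real `P<Value>` machinery and `get_argument_reg_offset`:

```rust
// Simplified stand-ins for the real P<Value> / make_value_base_offset types.
enum ArgLoc {
    Register(usize),                         // keep the value in this register
    BaseOffset { base: usize, offset: i64 }, // store it at [base + offset]
}

// Hypothetical spill-slot table: one word per argument register.
fn argument_reg_offset(reg_id: usize) -> i64 {
    (reg_id % 8) as i64 * 8
}

// None => the argument is passed in its register as usual;
// Some => it is written to an offset from the given base instead,
//         e.g. when staging arguments on a freshly created stack.
fn resolve_arg_loc(reg_id: usize, reg_arg_base: Option<usize>) -> ArgLoc {
    match reg_arg_base {
        Some(base) => ArgLoc::BaseOffset {
            base,
            offset: argument_reg_offset(reg_id),
        },
        None => ArgLoc::Register(reg_id),
    }
}
```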
......@@ -3969,8 +3993,15 @@ impl<'a> InstructionSelection {
} else {
if (reg_args && arg_loc.is_reg()) || (stack_args && !arg_loc.is_reg()) {
let arg_loc = match reg_arg_base {
Some(ref b) if arg_loc.is_reg() => make_value_base_offset(b, get_argument_reg_offset(arg_loc.id()) as i64, &arg_loc.ty, vm),
_ => arg_loc.clone(),
Some(ref b) if arg_loc.is_reg() => {
make_value_base_offset(
b,
get_argument_reg_offset(arg_loc.id()) as i64,
&arg_loc.ty,
vm
)
}
_ => arg_loc.clone()
};
emit_move_value_to_value(
......@@ -4360,7 +4391,14 @@ impl<'a> InstructionSelection {
)
}
fn emit_arg_values(&mut self, args: &Vec<OpIndex>, ops: &Vec<P<TreeNode>>, f_content: &FunctionContent, f_context: &mut FunctionContext, vm: &VM) -> Vec<P<Value>> {
fn emit_arg_values(
&mut self,
args: &Vec<OpIndex>,
ops: &Vec<P<TreeNode>>,
f_content: &FunctionContent,
f_context: &mut FunctionContext,
vm: &VM
) -> Vec<P<Value>> {
// prepare args (they could be instructions, we need to emit inst and get value)
let mut arg_values = vec![];
for arg_index in args {
......@@ -4644,7 +4682,10 @@ impl<'a> InstructionSelection {
}
}
fn get_potentially_excepting(resumption: Option<&ResumptionData>, f_content: &FunctionContent) -> Option<MuName> {
fn get_potentially_excepting(
resumption: Option<&ResumptionData>,
f_content: &FunctionContent
) -> Option<MuName> {
if resumption.is_some() {
let target_id = resumption.unwrap().exn_dest.target;
Some(f_content.get_block(target_id).name())
......@@ -4653,17 +4694,19 @@ impl<'a> InstructionSelection {
}
}
fn record_callsite(&mut self, resumption: Option<&ResumptionData>, callsite: ValueLocation, stack_arg_size: usize) {
fn record_callsite(
&mut self,
resumption: Option<&ResumptionData>,
callsite: ValueLocation,
stack_arg_size: usize
) {
let target_block = match resumption {
Some(rd) => rd.exn_dest.target,
None => 0
};
self.current_callsites.push_back((
callsite.to_relocatable(),
target_block,
stack_arg_size
));
self.current_callsites
.push_back((callsite.to_relocatable(), target_block, stack_arg_size));
}
fn emit_mu_call(
......
......@@ -109,7 +109,8 @@ lazy_static! {
arg_tys: vec![STACKREF_TYPE.clone(), REF_VOID_TYPE.clone(), REF_VOID_TYPE.clone()],
ret_tys: vec![THREADREF_TYPE.clone()],
}),
aot: ValueLocation::Relocatable(RegGroup::GPR, String::from("muentry_new_thread_exceptional")),
aot: ValueLocation::Relocatable(RegGroup::GPR,
String::from("muentry_new_thread_exceptional")),
jit: RwLock::new(None),
};
......
......@@ -299,12 +299,7 @@ pub extern "C" fn mu_main(
};
// FIXME: currently assumes no user defined thread local - See Issue #48
thread::MuThread::new_thread_normal(
stack,
unsafe { Address::zero() },
args,
vm.clone()
);
thread::MuThread::new_thread_normal(stack, unsafe { Address::zero() }, args, vm.clone());
loop {
let thread = vm.pop_join_handle();
......
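`mu_main` above spawns the main Mu thread and then drains the VM's pending join handles. A self-contained sketch of that pattern, using a stripped-down `Vm` type and a simplified `Option`-returning `pop_join_handle` in place of the real API:

```rust
use std::collections::LinkedList;
use std::sync::Mutex;
use std::thread::JoinHandle;

// Stripped-down stand-in for the real VM; only the pending_joins field shown
// later in this diff is modelled.
struct Vm {
    pending_joins: Mutex<LinkedList<JoinHandle<()>>>,
}

impl Vm {
    fn push_join_handle(&self, handle: JoinHandle<()>) {
        self.pending_joins.lock().unwrap().push_back(handle);
    }
    fn pop_join_handle(&self) -> Option<JoinHandle<()>> {
        self.pending_joins.lock().unwrap().pop_front()
    }
}

/// Join every thread that has registered a handle; handles pushed while we
/// are joining are picked up on later iterations.
fn wait_for_all(vm: &Vm) {
    while let Some(handle) = vm.pop_join_handle() {
        handle.join().unwrap();
    }
}
```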
......@@ -457,7 +457,8 @@ impl MuThread {
) {
// set up arguments on stack
stack.setup_args(vals);
let (join_handle, _) = MuThread::mu_thread_launch(vm.next_id(), stack, threadlocal, None, vm.clone());
let (join_handle, _) =
MuThread::mu_thread_launch(vm.next_id(), stack, threadlocal, None, vm.clone());
vm.push_join_handle(join_handle);
}
......@@ -471,36 +472,41 @@ impl MuThread {
) -> (JoinHandle<()>, *mut MuThread) {
let new_sp = stack.sp;
// The conversions between boxes and ptrs are needed here as a '*mut MuThread* can't be sent between threads
// but a Box can be. Also converting a Box to a ptr consumes it.
let muthread_ptr = Box::into_raw(Box::new(MuThread::new(id, mm::new_mutator(), stack, user_tls, vm)));
let muthread = unsafe {Box::from_raw(muthread_ptr)};
(match thread::Builder::new()
.name(format!("Mu Thread #{}", id))
.spawn(move || {
let muthread = Box::into_raw(muthread);
// set thread local
unsafe { set_thread_local(muthread) };
let addr = unsafe { muentry_get_thread_local() };
let sp_threadlocal_loc = addr + *NATIVE_SP_LOC_OFFSET;
debug!("new sp: 0x{:x}", new_sp);
debug!("sp_store: 0x{:x}", sp_threadlocal_loc);
unsafe {
match exception {
Some(e) => muthread_start_exceptional(e, new_sp, sp_threadlocal_loc),
None => muthread_start_normal(new_sp, sp_threadlocal_loc)
// The conversions between boxes and ptrs are needed here as a '*mut MuThread* can't be
// sent between threads but a Box can. Also converting a Box to a ptr consumes it.
let muthread_ptr = Box::into_raw(Box::new(
MuThread::new(id, mm::new_mutator(), stack, user_tls, vm)
));
let muthread = unsafe { Box::from_raw(muthread_ptr) };
(
match thread::Builder::new()
.name(format!("Mu Thread #{}", id))
.spawn(move || {
let muthread = Box::into_raw(muthread);
// set thread local
unsafe { set_thread_local(muthread) };
let addr = unsafe { muentry_get_thread_local() };
let sp_threadlocal_loc = addr + *NATIVE_SP_LOC_OFFSET;
debug!("new sp: 0x{:x}", new_sp);
debug!("sp_store: 0x{:x}", sp_threadlocal_loc);
unsafe {
match exception {
Some(e) => muthread_start_exceptional(e, new_sp, sp_threadlocal_loc),
None => muthread_start_normal(new_sp, sp_threadlocal_loc)
}
// Thread finished, delete its data
Box::from_raw(muthread);
}
// Thread finished, delete its data
Box::from_raw(muthread);
}
}) {
Ok(handle) => handle,
Err(_) => panic!("failed to create a thread")
}, muthread_ptr)
}) {
Ok(handle) => handle,
Err(_) => panic!("failed to create a thread")
},
muthread_ptr
)
}
/// creates metadata for a Mu thread
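The comment in `mu_thread_launch` explains why the thread's data is shuttled between `Box` and raw-pointer form. A minimal self-contained sketch of that pattern, with an illustrative `ThreadData` type instead of `MuThread`:

```rust
use std::thread::{self, JoinHandle};

struct ThreadData {
    id: u64,
}

// *mut T is not Send, but Box<T> is (for Send contents), so the data crosses
// the thread boundary as a Box; Box::into_raw consumes the Box (nothing frees
// it early) and Box::from_raw later reclaims ownership so the data can be
// dropped when the thread finishes.
fn launch(id: u64) -> (JoinHandle<()>, *mut ThreadData) {
    let ptr = Box::into_raw(Box::new(ThreadData { id }));
    // Re-box the pointer so the closure captures a Box rather than a raw ptr.
    let boxed = unsafe { Box::from_raw(ptr) };
    let handle = thread::spawn(move || {
        let raw = Box::into_raw(boxed); // e.g. stash in thread-local storage
        // ... thread body would work through `raw` here ...
        unsafe { drop(Box::from_raw(raw)) }; // reclaim and free on exit
    });
    (handle, ptr)
}
```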
......@@ -658,18 +664,37 @@ pub unsafe extern "C" fn muentry_kill_stack(stack: *mut MuStack) {
// Creates a new thread
#[no_mangle]
pub unsafe extern "C" fn muentry_new_thread_exceptional(stack: *mut MuStack, thread_local: Address, exception: Address) -> *mut MuThread {
pub unsafe extern "C" fn muentry_new_thread_exceptional(
stack: *mut MuStack,
thread_local: Address,
exception: Address
) -> *mut MuThread {
let vm = MuThread::current_mut().vm.clone();
let (join_handle, muthread) = MuThread::mu_thread_launch(vm.next_id(), Box::from_raw(stack), thread_local, Some(exception), vm.clone());
let (join_handle, muthread) = MuThread::mu_thread_launch(
vm.next_id(),
Box::from_raw(stack),
thread_local,
Some(exception),
vm.clone()
);
vm.push_join_handle(join_handle);
muthread
}
// Creates a new thread
#[no_mangle]
pub unsafe extern "C" fn muentry_new_thread_normal(stack: *mut MuStack, thread_local: Address) -> *mut MuThread {
pub unsafe extern "C" fn muentry_new_thread_normal(
stack: *mut MuStack,
thread_local: Address
) -> *mut MuThread {
let vm = MuThread::current_mut().vm.clone();
let (join_handle, muthread) = MuThread::mu_thread_launch(vm.next_id(), Box::from_raw(stack), thread_local, None, vm.clone());
let (join_handle, muthread) = MuThread::mu_thread_launch(
vm.next_id(),
Box::from_raw(stack),
thread_local,
None,
vm.clone()
);
vm.push_join_handle(join_handle);
muthread
}
\ No newline at end of file
}
......@@ -3283,7 +3283,8 @@ impl<'lb, 'lvm> BundleLoader<'lb, 'lvm> {
let impl_ord = self.build_mem_ord(ord);
let impl_loc = self.get_treenode(fcb, loc);
let impl_rvtype = self.get_built_type(refty);
let impl_rv = self.new_ssa(fcb, result_id, self.vm.make_strong_type(impl_rvtype)).clone_value();
let impl_rv = self.new_ssa(fcb, result_id,
self.vm.make_strong_type(impl_rvtype)).clone_value();
let impl_refty = self.get_built_type(refty);
assert_ir!(impl_ord != MemoryOrder::Release && impl_ord != MemoryOrder::AcqRel);
......@@ -3425,7 +3426,8 @@ impl<'lb, 'lvm> BundleLoader<'lb, 'lvm> {
})
.collect::<Vec<_>>();
let (is_exception, args) = self.build_new_stack_clause(new_stack_clause, fcb, &mut ops);
let (is_exception, args) =
self.build_new_stack_clause(new_stack_clause, fcb, &mut ops);
match exc_clause {
Some(ecid) => {
......@@ -3507,13 +3509,15 @@ impl<'lb, 'lvm> BundleLoader<'lb, 'lvm> {
Some(tl) => {
let index = ops.len();
let tl = self.add_opnd(fcb, &mut ops, tl);
assert_ir!(tl.ty().is_ref() && tl.ty().get_referent_ty().unwrap().is_void());
assert_ir!(tl.ty().is_ref() &&
tl.ty().get_referent_ty().unwrap().is_void());
Some(index)
}
None => None,
};
let (is_exception, args) = self.build_new_stack_clause(new_stack_clause, fcb, &mut ops);
let (is_exception, args) =
self.build_new_stack_clause(new_stack_clause, fcb, &mut ops);
Instruction {
......
......@@ -123,7 +123,9 @@ pub struct VM {
/// Number of callsites in the callsite tables
callsite_count: AtomicUsize,
pub pending_joins: Mutex<LinkedList<JoinHandle<()>>> // A list of all threads currently waiting to be joined
/// A list of all threads currently waiting to be joined
pub pending_joins: Mutex<LinkedList<JoinHandle<()>>>
}
unsafe impl rodal::Dump for VM {
......@@ -169,9 +171,7 @@ unsafe impl rodal::Dump for VM {
dumper.dump_object(&self.callsite_count);
dumper.dump_padding(&self.pending_joins);
dumper.dump_object_here(&Mutex::new(
rodal::EmptyLinkedList::<JoinHandle<()>>::new()
));
dumper.dump_object_here(&Mutex::new(rodal::EmptyLinkedList::<JoinHandle<()>>::new()));
}
}
......@@ -237,7 +237,7 @@ impl<'a> VM {
aot_pending_funcref_store: RwLock::new(HashMap::new()),
compiled_callsite_table: RwLock::new(HashMap::new()),
callsite_count: ATOMIC_USIZE_INIT,
pending_joins: Mutex::new(LinkedList::new()),
pending_joins: Mutex::new(LinkedList::new())
};
// insert all internal types
......