Commit 866e3df4 authored by qinsoon's avatar qinsoon

build exception table during current_thread_as_mu_thread(), hopefully

this will fix the test case
parent 1b72b623
Pipeline #592 passed with stages
in 42 minutes and 24 seconds
......@@ -62,6 +62,10 @@ impl <'vm> Compiler<'vm> {
hprof_print_timing(hprof::profiler().root());
func.set_compiled();
if self.vm.is_running() {
// build exception table for this function
unimplemented!()
}
}
pub fn get_policy(&self) -> &RefCell<CompilerPolicy> {
......
......@@ -52,21 +52,26 @@ pub extern fn throw_exception_internal(exception_obj: Address, frame_cursor: Add
let mut callsite = get_return_address(current_frame_pointer);
let mut previous_frame_pointer = get_previous_frame_pointer(current_frame_pointer); // thrower::fp, the starting point of the previous frame
// acquire lock for exception table
let compiled_exception_table = vm.compiled_exception_table.read().unwrap();
loop {
// Lookup the table for the callsite
trace!("Callsite: 0x{:x}", callsite);
trace!("\tprevious_frame_pointer: 0x{:x}", previous_frame_pointer);
trace!("\tcurrent_frame_pointer: 0x{:x}", current_frame_pointer);
let table_entry = vm.compiled_exception_table.get(&callsite);
let &(catch_address, compiled_func) = {
let table_entry = compiled_exception_table.get(&callsite);
if table_entry.is_none() {
error!("Cannot find Mu callsite (i.e. we have reached a native frame), either there isn't a catch block to catch the exception or your catch block is above a native function call");
print_backtrace(frame_cursor);
unreachable!(); // The above function will not return
}
if table_entry.is_none() {
error!("Cannot find Mu callsite (i.e. we have reached a native frame), either there isn't a catch block to catch the exception or your catch block is above a native function call");
print_backtrace(frame_cursor);
unreachable!(); // The above function will not return
}
let &(catch_address, compiled_func) = table_entry.unwrap();
table_entry.unwrap()
};
// Check for a catch block at this callsite (there won't be one on the first iteration of this loop)
if !catch_address.is_zero() {
......@@ -80,6 +85,7 @@ pub extern fn throw_exception_internal(exception_obj: Address, frame_cursor: Add
}
// Found a catch block, branch to it
drop(compiled_exception_table); // drop the lock first
unsafe { thread::exception_restore(catch_address, frame_cursor.to_ptr(), sp); }
}
......@@ -129,11 +135,13 @@ fn print_backtrace(base: Address) -> !{
let mut frame_pointer = base;
let mut frame_count = 0;
let compiled_exception_table = vm.compiled_exception_table.read().unwrap();
loop {
let callsite = get_return_address(frame_pointer);
if vm.compiled_exception_table.contains_key(&callsite) {
let &(_, compiled_func_ptr) = vm.compiled_exception_table.get(&callsite).unwrap();
if compiled_exception_table.contains_key(&callsite) {
let &(_, compiled_func_ptr) = compiled_exception_table.get(&callsite).unwrap();
unsafe {
let ref compiled_func = *compiled_func_ptr;
......
......@@ -393,6 +393,9 @@ impl MuThread {
pub unsafe fn current_thread_as_mu_thread(threadlocal: Address, vm: Arc<VM>) -> bool {
use std::usize;
// build exception table
vm.build_exception_table();
if ! unsafe{muentry_get_thread_local()}.is_zero() {
warn!("current thread has a thread local (has a muthread to it)");
return false;
......@@ -443,13 +446,6 @@ impl MuThread {
// set thread local
unsafe {set_thread_local(ptr_fake_mu_thread)};
// let addr = unsafe {muentry_get_thread_local()};
// let sp_threadlocal_loc = addr.plus(*NATIVE_SP_LOC_OFFSET);
//
// unsafe {
// fake_swap_mu_thread(sp_threadlocal_loc);
// }
true
}
......
......@@ -23,7 +23,6 @@ use compiler::{Compiler, CompilerPolicy};
use compiler::backend;
use compiler::backend::BackendTypeInfo;
use compiler::machine_code::CompiledFunction;
use compiler::frame::*;
use runtime::thread::*;
use runtime::*;
......@@ -84,7 +83,7 @@ pub struct VM {
// Maps each function-version id to a map from call-site label to the
// corresponding catch-block label (an empty string means no catch block)
exception_table: RwLock<HashMap<MuName, (MuName, MuID)>>,
exception_table: RwLock<HashMap<MuID, HashMap<MuName, MuName>>>,
// ---do not serialize---
......@@ -101,7 +100,9 @@ pub struct VM {
// Same as above, but after everything has been resolved to addresses
// TODO: What should the function version refer to? (It has to refer to something that has callee saved registers...)
pub compiled_exception_table: HashMap<Address, (Address, *const CompiledFunction)>
// TODO: probably we should remove the pointer (its unsafe), thats why we need Sync/Send for VM
// we can make a copy of callee_saved_register location
pub compiled_exception_table: RwLock<HashMap<Address, (Address, *const CompiledFunction)>>
}
unsafe impl Sync for VM {}
unsafe impl Send for VM {}
......@@ -258,7 +259,7 @@ impl Encodable for VM {
field_i += 1;
trace!("...serializing exception_table");
{
let map : &HashMap<MuName, (MuName, MuID)> = &self.exception_table.read().unwrap();
let map : &HashMap<MuID, HashMap<MuName, MuName>> = &self.exception_table.read().unwrap();
try!(s.emit_struct_field("exception_table", field_i, |s| map.encode(s)));
}
field_i += 1;
......@@ -395,7 +396,7 @@ impl Decodable for VM {
compiled_funcs: RwLock::new(compiled_funcs),
exception_table: RwLock::new(exception_table),
aot_pending_funcref_store: RwLock::new(HashMap::new()),
compiled_exception_table: HashMap::new(),
compiled_exception_table: RwLock::new(HashMap::new()),
};
vm.next_id.store(next_id, Ordering::SeqCst);
......@@ -458,7 +459,7 @@ impl <'a> VM {
primordial: RwLock::new(None),
aot_pending_funcref_store: RwLock::new(HashMap::new()),
compiled_exception_table: HashMap::new(),
compiled_exception_table: RwLock::new(HashMap::new()),
};
// insert all internal types
......@@ -533,13 +534,23 @@ impl <'a> VM {
}
/// Registers an exception-table entry: for function-version `fv`, the call
/// site labelled `callsite` unwinds to the catch block labelled `catch`
/// (an empty `catch` name means "no catch block at this call site").
///
/// Entries are grouped per function-version in
/// `exception_table: RwLock<HashMap<MuID, HashMap<MuName, MuName>>>`.
/// Takes the table's write lock for the duration of the insert.
pub fn add_exception_callsite(&self, callsite: MuName, catch: MuName, fv: MuID) {
    let mut table = self.exception_table.write().unwrap();
    // entry() does a single hash lookup, creating the per-function map on
    // first use (replaces the contains_key + get_mut double lookup).
    table.entry(fv).or_insert_with(HashMap::new).insert(callsite, catch);
}
pub fn resume_vm(serialized_vm: &str) -> VM {
use rustc_serialize::json;
let mut vm : VM = json::decode(serialized_vm).unwrap();
let vm : VM = json::decode(serialized_vm).unwrap();
vm.init_runtime();
......@@ -574,23 +585,30 @@ impl <'a> VM {
}
}
// Construct Exception table
{
let exception_table = vm.exception_table.read().unwrap();
let compiled_funcs = vm.compiled_funcs.read().unwrap();
for (callsite, &(ref catch, ref fv)) in exception_table.iter() {
let ref compiled_func = *compiled_funcs.get(fv).unwrap().read().unwrap();
let catch_addr = if catch.is_empty() {
unsafe { Address::zero() }
// construct exception table
vm.build_exception_table();
vm
}
/// Resolves the symbolic exception table (`MuName` call-site / catch-block
/// labels) into `compiled_exception_table`, which maps resolved call-site
/// addresses to `(catch_address, *const CompiledFunction)`.
///
/// An empty catch-block name resolves to `Address::zero()`, which the
/// unwinder treats as "no catch block here".
///
/// Locking: takes read locks on `exception_table` and `compiled_funcs`, and
/// the write lock on `compiled_exception_table`, so it must not be called
/// while the current thread already holds any of those locks.
pub fn build_exception_table(&self) {
    let exception_table = self.exception_table.read().unwrap();
    let compiled_funcs = self.compiled_funcs.read().unwrap();
    let mut compiled_exception_table = self.compiled_exception_table.write().unwrap();

    for (fv, map) in exception_table.iter() {
        // Every function-version with recorded call sites must have been
        // compiled by now; a miss here is a compiler-pipeline bug.
        let ref compiled_func = *compiled_funcs.get(fv).unwrap().read().unwrap();
        for (callsite, catch_block) in map.iter() {
            let catch_addr = if catch_block.is_empty() {
                // zero address = no catch block at this call site
                unsafe { Address::zero() }
            } else {
                resolve_symbol(catch_block.clone())
            };
            compiled_exception_table
                .insert(resolve_symbol(callsite.clone()), (catch_addr, &*compiled_func));
        }
    }
}
pub fn next_id(&self) -> MuID {
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment