Commit e04d88ed authored by qinsoon's avatar qinsoon

[wip] unwinding not correct

parent 071cf7e1
......@@ -1304,6 +1304,7 @@ pub fn emit_context(vm: &VM) {
// data
// serialize vm
trace!("start serializing vm");
{
let serialize_vm = json::encode(&vm).unwrap();
......
......@@ -378,10 +378,10 @@ impl <'a> InstructionSelection {
// emit a call to swap_back_to_native_stack(sp_loc: Address)
// get thread local and add offset to get sp_loc
let tl = self.emit_get_threadlocal(node, f_content, f_context, vm);
let tl = self.emit_get_threadlocal(Some(node), f_content, f_context, vm);
self.backend.emit_add_r64_imm32(&tl, *thread::NATIVE_SP_LOC_OFFSET as i32);
self.emit_runtime_entry(&entrypoints::SWAP_BACK_TO_NATIVE_STACK, vec![tl.clone()], None, node, f_content, f_context, vm);
self.emit_runtime_entry(&entrypoints::SWAP_BACK_TO_NATIVE_STACK, vec![tl.clone()], None, Some(node), f_content, f_context, vm);
}
Instruction_::New(ref ty) => {
......@@ -396,7 +396,7 @@ impl <'a> InstructionSelection {
// emit immix allocation fast path
// ASM: %tl = get_thread_local()
let tmp_tl = self.emit_get_threadlocal(node, f_content, f_context, vm);
let tmp_tl = self.emit_get_threadlocal(Some(node), f_content, f_context, vm);
// ASM: mov [%tl + allocator_offset + cursor_offset] -> %cursor
let cursor_offset = *thread::ALLOCATOR_OFFSET + *mm::ALLOCATOR_CURSOR_OFFSET;
......@@ -465,7 +465,7 @@ impl <'a> InstructionSelection {
Some(vec![
tmp_res.clone()
]),
node, f_content, f_context, vm
Some(node), f_content, f_context, vm
);
// end block (no liveout other than result)
......@@ -486,7 +486,7 @@ impl <'a> InstructionSelection {
&entrypoints::THROW_EXCEPTION,
vec![exception_obj.clone_value()],
None,
node, f_content, f_context, vm);
Some(node), f_content, f_context, vm);
}
_ => unimplemented!()
......@@ -544,7 +544,7 @@ impl <'a> InstructionSelection {
fn emit_get_threadlocal (
&mut self,
cur_node: &TreeNode,
cur_node: Option<&TreeNode>,
f_content: &FunctionContent,
f_context: &mut FunctionContext,
vm: &VM) -> P<Value> {
......@@ -562,7 +562,7 @@ impl <'a> InstructionSelection {
entry: &RuntimeEntrypoint,
args: Vec<P<Value>>,
rets: Option<Vec<P<Value>>>,
cur_node: &TreeNode,
cur_node: Option<&TreeNode>,
f_content: &FunctionContent,
f_context: &mut FunctionContext,
vm: &VM) -> Vec<P<Value>> {
......@@ -595,7 +595,7 @@ impl <'a> InstructionSelection {
sig: P<CFuncSig>,
args: Vec<P<Value>>,
rets: Option<Vec<P<Value>>>,
cur_node: &TreeNode,
cur_node: Option<&TreeNode>,
f_content: &FunctionContent,
f_context: &mut FunctionContext,
vm: &VM) -> Vec<P<Value>>
......@@ -643,12 +643,15 @@ impl <'a> InstructionSelection {
if vm.is_running() {
unimplemented!()
} else {
let callsite = self.new_callsite_label(cur_node.id());
let callsite = self.new_callsite_label(cur_node);
self.backend.emit_call_near_rel32(callsite, func_name);
// record exception block (CCall may have an exception block)
if cur_node.op == OpCode::CCall {
unimplemented!()
if cur_node.is_some() {
let cur_node = cur_node.unwrap();
if cur_node.op == OpCode::CCall {
unimplemented!()
}
}
}
......@@ -762,18 +765,18 @@ impl <'a> InstructionSelection {
if vm.is_running() {
unimplemented!()
} else {
let callsite = self.new_callsite_label(cur_node.id());
let callsite = self.new_callsite_label(Some(cur_node));
self.backend.emit_call_near_rel32(callsite, target.name().unwrap())
}
} else if self.match_ireg(func) {
let target = self.emit_ireg(func, f_content, f_context, vm);
let callsite = self.new_callsite_label(cur_node.id());
let callsite = self.new_callsite_label(Some(cur_node));
self.backend.emit_call_near_r64(callsite, &target)
} else if self.match_mem(func) {
let target = self.emit_mem(func);
let callsite = self.new_callsite_label(cur_node.id());
let callsite = self.new_callsite_label(Some(cur_node));
self.backend.emit_call_near_mem64(callsite, &target)
} else {
unimplemented!()
......@@ -1201,8 +1204,20 @@ impl <'a> InstructionSelection {
}
}
fn new_callsite_label(&mut self, node_id: MuID) -> String {
let ret = format!("callsite_{}_{}", node_id, self.current_callsite_id);
/// Emits a landing pad for an exception (catch) block: fetches the pending
/// exception object out of thread-local storage and loads it into
/// `exception_arg`, the block's exception argument.
/// NOTE(review): passes `None` as the current node, so the threadlocal call
/// is emitted as an anonymous callsite (see `new_callsite_label`).
fn emit_landingpad(&mut self, exception_arg: &P<Value>, f_content: &FunctionContent, f_context: &mut FunctionContext, vm: &VM) {
// get thread local and add offset to get exception_obj
let tl = self.emit_get_threadlocal(None, f_content, f_context, vm);
self.emit_load_base_offset(exception_arg, &tl, *thread::EXCEPTION_OBJ_OFFSET as i32, vm);
}
/// Generates a unique assembly label for a callsite.
///
/// When `cur_node` is given, the label embeds the IR node id so the callsite
/// can be traced back to its instruction; anonymous callsites (e.g. those
/// emitted from a landing pad, which has no associated node) get a distinct
/// `callsite_anon_*` prefix. The per-function counter guarantees uniqueness
/// even when one node produces several calls.
fn new_callsite_label(&mut self, cur_node: Option<&TreeNode>) -> String {
    // match instead of is_some()/unwrap(): no panic path, single binding
    let ret = match cur_node {
        Some(node) => format!("callsite_{}_{}", node.id(), self.current_callsite_id),
        None => format!("callsite_anon_{}", self.current_callsite_id)
    };
    self.current_callsite_id += 1;
    ret
}
......@@ -1242,20 +1257,27 @@ impl CompilerPass for InstructionSelection {
let block_label = block.name().unwrap();
self.current_block = Some(block_label.clone());
let block_content = block.content.as_ref().unwrap();
if block.is_exception_block() {
let loc = self.backend.start_exception_block(block_label.clone());
self.current_exn_blocks.insert(block.id(), loc);
let exception_arg = block_content.exn_arg.as_ref().unwrap();
// live in is args of the block + exception arg
let mut livein = block_content.args.to_vec();
livein.push(exception_arg.clone());
self.backend.set_block_livein(block_label.clone(), &livein);
// need to insert a landing pad
unimplemented!()
self.emit_landingpad(&exception_arg, f_content, &mut func.context, vm);
} else {
self.backend.start_block(block_label.clone());
self.backend.start_block(block_label.clone());
// live in is args of the block
self.backend.set_block_livein(block_label.clone(), &block_content.args);
}
let block_content = block.content.as_ref().unwrap();
// live in is args of the block
self.backend.set_block_livein(block_label.clone(), &block_content.args);
// live out is the union of all branch args of this block
let live_out = block_content.get_out_arguments();
......
......@@ -25,8 +25,8 @@ pub struct Frame {
cur_offset: isize, // offset to rbp
pub allocated: HashMap<MuID, FrameSlot>,
// key: callsite, val: destination address
pub exception_callsites: HashMap<ValueLocation, ValueLocation>
// (callsite, destination address)
exception_callsites: Vec<(ValueLocation, ValueLocation)>
}
impl fmt::Display for Frame {
......@@ -37,7 +37,7 @@ impl fmt::Display for Frame {
writeln!(f, " {}", slot).unwrap();
}
writeln!(f, " exception callsites:").unwrap();
for (callsite, dest) in self.exception_callsites.iter() {
for &(ref callsite, ref dest) in self.exception_callsites.iter() {
writeln!(f, " callsite: {} -> {}", callsite, dest).unwrap()
}
writeln!(f, "}}")
......@@ -50,7 +50,7 @@ impl Frame {
func_ver_id: func_ver_id,
cur_offset: - (POINTER_SIZE as isize * 1), // reserve for old RBP
allocated: HashMap::new(),
exception_callsites: HashMap::new()
exception_callsites: vec![]
}
}
......@@ -64,9 +64,13 @@ impl Frame {
slot.make_memory_op(reg.ty.clone(), vm)
}
/// Returns the recorded (callsite, catch-destination) pairs of this frame,
/// in the order they were added via `add_exception_callsite`.
/// Used by the unwinder to map a return address back to its catch block.
pub fn get_exception_callsites(&self) -> &Vec<(ValueLocation, ValueLocation)> {
&self.exception_callsites
}
/// Records a (callsite, destination) pair so that stack unwinding can find
/// the catch block associated with a call's return address.
pub fn add_exception_callsite(&mut self, callsite: ValueLocation, dest: ValueLocation) {
    trace!("add exception callsite: {} to dest {}", callsite, dest);
    // Pairs are kept in insertion order in a Vec; the unwinder does a
    // linear scan comparing addresses (ValueLocation is not hashable here).
    self.exception_callsites.push((callsite, dest));
}
fn alloc_slot(&mut self, val: &P<Value>, vm: &VM) -> &FrameSlot {
......
......@@ -25,11 +25,18 @@ const CF_SERIALIZE_FIELDS : usize = 6;
impl Encodable for CompiledFunction {
fn encode<S: Encoder> (&self, s: &mut S) -> Result<(), S::Error> {
s.emit_struct("CompiledFunction", CF_SERIALIZE_FIELDS, |s| {
trace!("......serializing func_id");
try!(s.emit_struct_field("func_id", 0, |s| self.func_id.encode(s)));
trace!("......serializing func_ver_id");
try!(s.emit_struct_field("func_ver_id", 1, |s| self.func_ver_id.encode(s)));
trace!("......serializing temps");
try!(s.emit_struct_field("temps", 2, |s| self.temps.encode(s)));
trace!("......serializing frame");
trace!("{}", self.frame);
try!(s.emit_struct_field("frame", 3, |s| self.frame.encode(s)));
trace!("......serializing start");
try!(s.emit_struct_field("start", 4, |s| self.start.encode(s)));
trace!("......serializing end");
try!(s.emit_struct_field("end", 5, |s| self.end.encode(s)));
Ok(())
......
......@@ -3,6 +3,7 @@ use compiler::machine_code::CompiledFunction;
use compiler::frame::*;
use compiler::backend::x86_64;
use utils::Address;
use utils::Word;
use utils::POINTER_SIZE;
use runtime::thread;
......@@ -23,7 +24,8 @@ pub extern fn muentry_throw_exception(exception_obj: Address) {
// rbp of current frame (mu_throw_exception(), Rust frame)
let rust_frame_rbp = unsafe {thread::get_current_frame_rbp()};
trace!("current frame RBP: 0x{:x}", rust_frame_rbp);
trace!("current frame RBP: 0x{:x}", rust_frame_rbp);
inspect_nearby_address(rust_frame_rbp, 5);
let rust_frame_return_addr = unsafe {rust_frame_rbp.plus(POINTER_SIZE).load::<Address>()};
trace!("return address : 0x{:x} - throw instruction", rust_frame_return_addr);
......@@ -76,8 +78,8 @@ pub extern fn muentry_throw_exception(exception_obj: Address) {
// find exception block - comparing callsite with frame info
trace!("checking catch block: looking for callsite 0x{:x}", callsite);
let ref exception_callsites = frame.exception_callsites;
for (possible_callsite, dest) in exception_callsites {
let exception_callsites = frame.get_exception_callsites();
for &(ref possible_callsite, ref dest) in exception_callsites.iter() {
let possible_callsite_addr = possible_callsite.to_address();
if callsite == possible_callsite_addr {
......@@ -98,6 +100,18 @@ pub extern fn muentry_throw_exception(exception_obj: Address) {
}
}
/// Debug helper: dumps the word-sized values stored in the `n` pointer slots
/// above and below `base` (inclusive), highest offset first.
fn inspect_nearby_address(base: Address, n: isize) {
    // Walk offsets n, n-1, ..., -n — same descending order as the original
    // countdown loop, expressed as a reversed range.
    for slot_idx in (-n..n + 1).rev() {
        // SAFETY-NOTE(review): assumes every word in [base - n, base + n]
        // slots is mapped and readable — only sound on a live stack frame.
        unsafe {
            let addr = base.offset(slot_idx * POINTER_SIZE as isize);
            let val = addr.load::<Word>();
            trace!("addr: 0x{:x} | val: 0x{:x}", addr, val);
        }
    }
}
struct FrameCursor {
rbp: Address,
return_addr: Address,
......
......@@ -229,6 +229,8 @@ lazy_static! {
+ mem::size_of::<Option<Box<MuStack>>>()
+ mem::size_of::<Address>()
+ mem::size_of::<Option<Address>>();
pub static ref EXCEPTION_OBJ_OFFSET : usize = *VM_OFFSET + mem::size_of::<Arc<VM>>();
}
#[cfg(target_arch = "x86_64")]
......
......@@ -19,12 +19,10 @@ impl Address {
}
#[inline(always)]
/// Returns this address displaced by `offset` bytes.
/// Signed arithmetic so a negative offset moves the address down.
pub fn offset(&self, offset: isize) -> Self {
    Address((self.0 as isize + offset) as usize)
}
#[inline(always)]
/// Returns this address displaced by `offset` elements of type `T`,
/// i.e. by `offset * size_of::<T>()` bytes (negative offsets move down).
pub fn shift<T>(&self, offset: isize) -> Self {
    Address((self.0 as isize + mem::size_of::<T>() as isize * offset) as usize)
}
#[inline(always)]
......
use ast::ir::*;
use compiler::frame::*;
use std::ops;
use std::collections::HashMap;
/// A Mu function compiled to machine code, bundled with the metadata needed
/// after compilation: the temporary-to-register mapping and the frame layout.
pub struct CompiledFunction {
// id of the Mu function this code belongs to
pub func_id: MuID,
// id of the specific function version that was compiled
pub func_ver_id: MuID,
pub temps: HashMap<MuID, MuID>, // assumes one temporary maps to one register
// the emitted machine code (behind the MachineCode trait object)
pub mc: Box<MachineCode>,
// stack-frame layout: spill slots and exception callsites
pub frame: Frame
}
/// Backend-independent interface to a function's emitted machine code.
/// Exposes per-instruction queries (moves, register uses/defines, CFG edges)
/// and mutation hooks (`replace_reg`, `set_inst_nop`) — presumably consumed
/// by liveness analysis and register allocation (TODO confirm against callers).
pub trait MachineCode {
/// logs the entire machine-code listing (debugging aid)
fn trace_mc(&self);
/// logs the single instruction at `index` (debugging aid)
fn trace_inst(&self, index: usize);
/// returns the encoded machine code as raw bytes
fn emit(&self) -> Vec<u8>;
/// total number of instructions in this function
fn number_of_insts(&self) -> usize;
/// whether the instruction at `index` is a move
fn is_move(&self, index: usize) -> bool;
/// whether the instruction at `index` uses a memory operand
fn is_using_mem_op(&self, index: usize) -> bool;
/// control-flow successors of the instruction at `index`
fn get_succs(&self, index: usize) -> &Vec<usize>;
/// control-flow predecessors of the instruction at `index`
fn get_preds(&self, index: usize) -> &Vec<usize>;
/// registers read (used) by the instruction at `index`
fn get_inst_reg_uses(&self, index: usize) -> &Vec<MuID>;
/// registers written (defined) by the instruction at `index`
fn get_inst_reg_defines(&self, index: usize) -> &Vec<MuID>;
/// live-in register set of the named IR block, if recorded
fn get_ir_block_livein(&self, block: &str) -> Option<&Vec<MuID>>;
/// live-out register set of the named IR block, if recorded
fn get_ir_block_liveout(&self, block: &str) -> Option<&Vec<MuID>>;
/// records the live-in register set for the named IR block
fn set_ir_block_livein(&mut self, block: &str, set: Vec<MuID>);
/// records the live-out register set for the named IR block
fn set_ir_block_liveout(&mut self, block: &str, set: Vec<MuID>);
/// names of all IR blocks in this function
fn get_all_blocks(&self) -> &Vec<MuName>;
/// instruction index range covered by the named block, if known
fn get_block_range(&self, block: &str) -> Option<ops::Range<usize>>;
/// rewrites every occurrence of register `from` into `to`
fn replace_reg(&mut self, from: MuID, to: MuID);
/// turns the instruction at `index` into a no-op
fn set_inst_nop(&mut self, index: usize);
}
......@@ -62,58 +62,68 @@ impl Encodable for VM {
// PLUS ONE extra global STRUCT_TAG_MAP
s.emit_struct("VM", VM_SERIALIZE_FIELDS + 1, |s| {
// next_id
trace!("...serializing next_id");
try!(s.emit_struct_field("next_id", 0, |s| {
s.emit_usize(self.next_id.load(Ordering::SeqCst))
}));
// id_name_map
trace!("...serializing id_name_map");
{
let map : &HashMap<MuID, MuName> = &self.id_name_map.read().unwrap();
try!(s.emit_struct_field("id_name_map", 1, |s| map.encode(s)));
}
// name_id_map
trace!("...serializing name_id_map");
{
let map : &HashMap<MuName, MuID> = &self.name_id_map.read().unwrap();
try!(s.emit_struct_field("name_id_map", 2, |s| map.encode(s)));
}
// types
trace!("...serializing types");
{
let types = &self.types.read().unwrap();
try!(s.emit_struct_field("types", 3, |s| types.encode(s)));
}
// STRUCT_TAG_MAP
trace!("...serializing struct_tag_map");
{
let struct_tag_map = types::STRUCT_TAG_MAP.read().unwrap();
try!(s.emit_struct_field("struct_tag_map", 4, |s| struct_tag_map.encode(s)));
}
// backend_type_info
trace!("...serializing backend_type_info");
{
let backend_type_info : &HashMap<_, _> = &self.backend_type_info.read().unwrap();
try!(s.emit_struct_field("backend_type_info", 5, |s| backend_type_info.encode(s)));
}
// constants
trace!("...serializing constants");
{
let constants : &HashMap<_, _> = &self.constants.read().unwrap();
try!(s.emit_struct_field("constants", 6, |s| constants.encode(s)));
}
// globals
trace!("...serializing globals");
{
let globals: &HashMap<_, _> = &self.globals.read().unwrap();
try!(s.emit_struct_field("globals", 7, |s| globals.encode(s)));
}
// func sigs
trace!("...serializing func_sigs");
{
let func_sigs: &HashMap<_, _> = &self.func_sigs.read().unwrap();
try!(s.emit_struct_field("func_sigs", 8, |s| func_sigs.encode(s)));
}
// funcs
trace!("...serializing funcs");
{
let funcs : &HashMap<_, _> = &self.funcs.read().unwrap();
try!(s.emit_struct_field("funcs", 9, |s| {
......@@ -131,6 +141,7 @@ impl Encodable for VM {
}
// func_vers
trace!("...serializing func_vers");
{
let func_vers : &HashMap<_, _> = &self.func_vers.read().unwrap();
try!(s.emit_struct_field("func_vers", 10, |s| {
......@@ -148,17 +159,20 @@ impl Encodable for VM {
}
// primordial
trace!("...serializing primordial");
{
let primordial = &self.primordial.read().unwrap();
try!(s.emit_struct_field("primordial", 11, |s| primordial.encode(s)));
}
// is_running
trace!("...serializing is_running");
{
try!(s.emit_struct_field("is_running", 12, |s| self.is_running.load(Ordering::SeqCst).encode(s)));
}
// compiled_funcs
trace!("...serializing compiled_funcs");
{
let compiled_funcs : &HashMap<_, _> = &self.compiled_funcs.read().unwrap();
try!(s.emit_struct_field("compiled_funcs", 13, |s| {
......@@ -175,6 +189,7 @@ impl Encodable for VM {
}));
}
trace!("serializing finished");
Ok(())
})
}
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment