Commit 0ced4d8a authored by Isaac Oscar Gariano's avatar Isaac Oscar Gariano

Implemented very fast exception handling (compared to before...)!

parent 2230463c
Pipeline #583 failed with stages
......@@ -26,6 +26,8 @@ use compiler::machine_code::CompiledFunction;
use compiler::frame::Frame;
use std::collections::HashMap;
use std::collections::LinkedList;
use std::any::Any;
const INLINE_FASTPATH : bool = false;
......@@ -40,10 +42,14 @@ pub struct InstructionSelection {
current_block: Option<MuName>,
current_block_in_ir: Option<MuName>,
current_func_start: Option<ValueLocation>,
// key: block id, val: callsite that names the block as exception block
current_exn_callsites: HashMap<MuID, Vec<ValueLocation>>,
// A list of all callsites, with the corresponding exception block (if there is one).
// Technically this is a map in that each key is unique, but we will never try to add duplicate
// keys, or look things up, so a list of pairs is faster than a map.
current_callsites: LinkedList<(MuName, MuID)>,
// key: block id, val: block location
current_exn_blocks: HashMap<MuID, ValueLocation>,
current_exn_blocks: HashMap<MuID, MuName>,
current_xr_value: Option<P<Value>>, // A temporary that holds to saved XR value (if needed)
current_constants: HashMap<MuID, P<Value>>,
current_constants_locs: HashMap<MuID, P<Value>>
......@@ -68,8 +74,7 @@ impl <'a> InstructionSelection {
// FIXME: ideally we should not create new blocks in instruction selection
// see Issue #6
current_func_start: None,
// key: block id, val: callsite that names the block as exception block
current_exn_callsites: HashMap::new(),
current_callsites: LinkedList::new(),
current_exn_blocks: HashMap::new(),
current_xr_value: None,
current_constants: HashMap::new(),
......@@ -3042,7 +3047,11 @@ impl <'a> InstructionSelection {
unimplemented!()
} else {
let callsite = self.new_callsite_label(cur_node);
self.backend.emit_bl(callsite, func_name, None); // assume ccall wont throw exception
self.backend.emit_bl(callsite.clone(), func_name, None); // assume ccall wont throw exception
// TODO: What if there's an exception block?
self.current_callsites.push_back((callsite, 0));
// record exception block (CCall may have an exception block)
if cur_node.is_some() {
......@@ -3209,14 +3218,9 @@ impl <'a> InstructionSelection {
let ref exn_dest = resumption.as_ref().unwrap().exn_dest;
let target_block = exn_dest.target;
if self.current_exn_callsites.contains_key(&target_block) {
let callsites = self.current_exn_callsites.get_mut(&target_block).unwrap();
callsites.push(callsite);
} else {
let mut callsites = vec![];
callsites.push(callsite);
self.current_exn_callsites.insert(target_block, callsites);
}
self.current_callsites.push_back((callsite.to_relocatable(), target_block));
} else {
self.current_callsites.push_back((callsite.to_relocatable(), 0));
}
// deal with ret vals
......@@ -3284,22 +3288,21 @@ impl <'a> InstructionSelection {
}
// push all callee-saved registers
for i in 0..CALLEE_SAVED_FPRs.len() {
let ref reg = CALLEE_SAVED_FPRs[i];
for i in 0..CALLEE_SAVED_GPRs.len() {
let ref reg = CALLEE_SAVED_GPRs[i];
trace!("allocate frame slot for regs {}", reg);
trace!("allocate frame slot for reg {}", reg);
let loc = self.current_frame.as_mut().unwrap().alloc_slot_for_callee_saved_reg(reg.clone(), vm);
let loc = emit_mem(self.backend.as_mut(), &loc, get_type_alignment(&reg.ty, vm), f_context, vm);
self.backend.emit_str_callee_saved(&loc, &reg);
}
for i in 0..CALLEE_SAVED_GPRs.len() {
let ref reg = CALLEE_SAVED_GPRs[i];
trace!("allocate frame slot for regs {}", reg);
for i in 0..CALLEE_SAVED_FPRs.len() {
let ref reg = CALLEE_SAVED_FPRs[i];
trace!("allocate frame slot for reg {}", reg);
let loc = self.current_frame.as_mut().unwrap().alloc_slot_for_callee_saved_reg(reg.clone(), vm);
let loc = emit_mem(self.backend.as_mut(), &loc, get_type_alignment(&reg.ty, vm), f_context, vm);
self.backend.emit_str_callee_saved(&loc, &reg);
}
// unload arguments
......@@ -3370,16 +3373,16 @@ impl <'a> InstructionSelection {
self.start_block(EPILOGUE_BLOCK_NAME.to_string(), &livein);
// pop all callee-saved registers
for i in (0..CALLEE_SAVED_GPRs.len()).rev() {
let ref reg = CALLEE_SAVED_GPRs[i];
for i in (0..CALLEE_SAVED_FPRs.len()).rev() {
let ref reg = CALLEE_SAVED_FPRs[i];
let reg_id = reg.extract_ssa_id().unwrap();
let loc = self.current_frame.as_mut().unwrap().allocated.get(&reg_id).unwrap().make_memory_op(reg.ty.clone(), vm);
let loc = emit_mem(self.backend.as_mut(), &loc, get_type_alignment(&reg.ty, vm), f_context, vm);
self.backend.emit_ldr_callee_saved(reg, &loc);
}
for i in (0..CALLEE_SAVED_FPRs.len()).rev() {
let ref reg = CALLEE_SAVED_FPRs[i];
for i in (0..CALLEE_SAVED_GPRs.len()).rev() {
let ref reg = CALLEE_SAVED_GPRs[i];
let reg_id = reg.extract_ssa_id().unwrap();
let loc = self.current_frame.as_mut().unwrap().allocated.get(&reg_id).unwrap().make_memory_op(reg.ty.clone(), vm);
let loc = emit_mem(self.backend.as_mut(), &loc, get_type_alignment(&reg.ty, vm), f_context, vm);
......@@ -4093,7 +4096,7 @@ impl CompilerPass for InstructionSelection {
start_loc
});
self.current_callsite_id = 0;
self.current_exn_callsites.clear();
self.current_callsites.clear();
self.current_exn_blocks.clear();
self.current_constants.clear();
......@@ -4124,7 +4127,7 @@ impl CompilerPass for InstructionSelection {
// we need to be aware of exception blocks so that we can emit information to catch exceptions
let loc = self.backend.start_exception_block(block_label.clone());
self.current_exn_blocks.insert(block.id(), loc);
self.current_exn_blocks.insert(block.id(), loc.to_relocatable());
} else {
// normal block
self.backend.start_block(block_label.clone());
......@@ -4160,7 +4163,7 @@ impl CompilerPass for InstructionSelection {
}
}
fn finish_function(&mut self, vm: &VM, func: &mut MuFunctionVersion) {
fn finish_function(&mut self, vm: &VM, func: &mut MuFunctionVersion) {
self.emit_common_epilogue(&func.sig.ret_tys, &mut func.context, vm);
self.backend.print_cur_code();
......@@ -4178,23 +4181,19 @@ impl CompilerPass for InstructionSelection {
let (mc, func_end) = self.backend.finish_code(func_name.clone());
// insert exception branch info
let mut frame = match self.current_frame.take() {
let frame = match self.current_frame.take() {
Some(frame) => frame,
None => panic!("no current_frame for function {} that is being compiled", func_name)
};
for block_id in self.current_exn_blocks.keys() {
let block_loc = match self.current_exn_blocks.get(&block_id) {
Some(loc) => loc,
None => panic!("failed to find exception block {}", block_id)
};
let callsites = match self.current_exn_callsites.get(&block_id) {
Some(callsite) => callsite,
None => panic!("failed to find callsite for block {}", block_id)
for &(ref callsite, block_id) in self.current_callsites.iter() {
let block_loc = if block_id == 0 {
String::new()
} else {
self.current_exn_blocks.get(&block_id).unwrap().clone()
};
for callsite in callsites {
frame.add_exception_callsite(callsite.clone(), block_loc.clone());
}
vm.add_exception_callsite(callsite.clone(), block_loc, self.current_fv_id);
}
let compiled_func = CompiledFunction::new(func.func_id, func.id(), mc,
......
This diff is collapsed.
......@@ -28,6 +28,8 @@ use utils::math;
use utils::POINTER_SIZE;
use std::collections::HashMap;
use std::collections::LinkedList;
use std::any::Any;
lazy_static! {
......@@ -108,10 +110,12 @@ pub struct InstructionSelection {
current_block: Option<MuName>,
current_block_in_ir: Option<MuName>,
current_func_start: Option<ValueLocation>,
// key: block id, val: callsite that names the block as exception block
current_exn_callsites: HashMap<MuID, Vec<ValueLocation>>,
// Technically this is a map in that each key is unique, but we will never try to add duplicate
// keys, or look things up, so a list of pairs is faster than a map.
// A list of pairs: the first is the name of a callsite, the second the ID of its
// exception block (or 0 if it has none)
current_callsites: LinkedList<(MuName, MuID)>,
// key: block id, val: block location
current_exn_blocks: HashMap<MuID, ValueLocation>,
current_exn_blocks: HashMap<MuID, MuName>,
current_constants: HashMap<MuID, P<Value>>,
current_constants_locs: HashMap<MuID, P<Value>>
......@@ -135,8 +139,7 @@ impl <'a> InstructionSelection {
// FIXME: ideally we should not create new blocks in instruction selection
// see Issue #6
current_func_start: None,
// key: block id, val: callsite that names the block as exception block
current_exn_callsites: HashMap::new(),
current_callsites: LinkedList::new(),
current_exn_blocks: HashMap::new(),
current_constants: HashMap::new(),
......@@ -3166,8 +3169,11 @@ impl <'a> InstructionSelection {
unimplemented!()
} else {
let callsite = self.new_callsite_label(cur_node);
self.backend.emit_call_near_rel32(callsite, func_name, None); // assume ccall wont throw exception
self.backend.emit_call_near_rel32(callsite.clone(), func_name, None); // assume ccall wont throw exception
// TODO: What if there's an exception block?
self.current_callsites.push_back((callsite, 0));
// record exception block (CCall may have an exception block)
if cur_node.is_some() {
let cur_node = cur_node.unwrap();
......@@ -3347,21 +3353,16 @@ impl <'a> InstructionSelection {
// record exception branch
let ref exn_dest = resumption.as_ref().unwrap().exn_dest;
let target_block = exn_dest.target;
if self.current_exn_callsites.contains_key(&target_block) {
let callsites = self.current_exn_callsites.get_mut(&target_block).unwrap();
callsites.push(callsite);
} else {
let mut callsites = vec![];
callsites.push(callsite);
self.current_exn_callsites.insert(target_block, callsites);
}
self.current_callsites.push_back((callsite.to_relocatable(), target_block));
// insert an intermediate block to branch to normal
// the branch is inserted later (because we need to deal with postcall convention)
self.finish_block();
let fv_id = self.current_fv_id;
self.start_block(format!("normal_cont_for_call_{}_{}", fv_id, cur_node.id()));
} else {
self.current_callsites.push_back((callsite.to_relocatable(), 0));
}
// deal with ret vals, collapse stack etc.
......@@ -4811,7 +4812,7 @@ impl CompilerPass for InstructionSelection {
start_loc
});
self.current_callsite_id = 0;
self.current_exn_callsites.clear();
self.current_callsites.clear();
self.current_exn_blocks.clear();
self.current_constants.clear();
......@@ -4843,7 +4844,7 @@ impl CompilerPass for InstructionSelection {
// we need to be aware of exception blocks so that we can emit information to catch exceptions
let loc = self.backend.start_exception_block(block_label.clone());
self.current_exn_blocks.insert(block.id(), loc);
self.current_exn_blocks.insert(block.id(), loc.to_relocatable());
} else {
// normal block
self.backend.start_block(block_label.clone());
......@@ -4904,25 +4905,21 @@ impl CompilerPass for InstructionSelection {
let (mc, func_end) = self.backend.finish_code(func_name.clone());
// insert exception branch info
let mut frame = match self.current_frame.take() {
let frame = match self.current_frame.take() {
Some(frame) => frame,
None => panic!("no current_frame for function {} that is being compiled", func_name)
};
for block_id in self.current_exn_blocks.keys() {
let block_loc = match self.current_exn_blocks.get(&block_id) {
Some(loc) => loc,
None => panic!("failed to find exception block {}", block_id)
};
let callsites = match self.current_exn_callsites.get(&block_id) {
Some(callsite) => callsite,
None => panic!("failed to find callsite for block {}", block_id)
for &(ref callsite, block_id) in self.current_callsites.iter() {
let block_loc = if block_id == 0 {
String::new()
} else {
self.current_exn_blocks.get(&block_id).unwrap().clone()
};
for callsite in callsites {
frame.add_exception_callsite(callsite.clone(), block_loc.clone());
}
vm.add_exception_callsite(callsite.clone(), block_loc, self.current_fv_id);
}
let compiled_func = CompiledFunction::new(func.func_id, func.id(), mc,
self.current_constants.clone(), self.current_constants_locs.clone(),
frame, self.current_func_start.take().unwrap(), func_end);
......
......@@ -13,6 +13,7 @@ pub use compiler::backend::x86_64::asm_backend::emit_context;
pub use compiler::backend::x86_64::asm_backend::emit_context_with_reloc;
#[cfg(feature = "aot")]
pub use compiler::backend::x86_64::asm_backend::spill_rewrite;
use utils::Address;
use ast::ptr::P;
use ast::ir::*;
......@@ -22,6 +23,9 @@ use compiler::backend::RegGroup;
use utils::LinkedHashMap;
use std::collections::HashMap;
// Number of normal callee-saved registers (excluding RSP and RBP)
pub const CALLEE_SAVED_COUNT : usize = 5;
macro_rules! GPR_ALIAS {
($alias: ident: ($id64: expr, $r64: ident) -> $r32: ident, $r16: ident, $r8l: ident, $r8h: ident) => {
lazy_static!{
......@@ -448,6 +452,47 @@ pub fn pick_group_for_reg(reg_id: MuID) -> RegGroup {
RegGroup::get_from_value(reg)
}
// Gets the previous frame pointer with respect to the current one,
// i.e. the caller's saved FP stored at the base of the frame record
// that `frame_pointer` points to.
#[inline(always)]
pub fn get_previous_frame_pointer(frame_pointer: Address) -> Address {
// SAFETY: assumes `frame_pointer` addresses a valid frame record whose
// first word is the saved previous frame pointer — TODO confirm this
// holds for every frame this runtime walks.
unsafe { frame_pointer.load::<Address>() }
}
// Gets the return address for the current frame pointer.
// The return address is stored one word (8 bytes) above the saved FP
// in the frame record.
#[inline(always)]
pub fn get_return_address(frame_pointer: Address) -> Address {
// SAFETY: assumes `frame_pointer` addresses a valid frame record with
// the return address at offset 8 — TODO confirm for all frames walked.
unsafe { frame_pointer.plus(8).load::<Address>() }
}
// Gets the stack pointer before the current frame was created,
// by skipping over the 16-byte frame record (saved FP + return address)
// that sits at `frame_pointer`.
#[inline(always)]
pub fn get_previous_stack_pointer(frame_pointer: Address) -> Address {
frame_pointer.plus(16)
}
// Sets the saved previous frame pointer (the first word of the frame
// record at `frame_pointer`) to `value`.
#[inline(always)]
pub fn set_previous_frame_pointer(frame_pointer: Address, value: Address) {
// SAFETY: caller must ensure `frame_pointer` addresses a writable,
// valid frame record.
unsafe { frame_pointer.store::<Address>(value) }
}
// Sets the return address slot (8 bytes above the saved FP) of the
// frame record at `frame_pointer` to `value`.
#[inline(always)]
pub fn set_return_address(frame_pointer: Address, value: Address) {
// SAFETY: caller must ensure `frame_pointer` addresses a writable,
// valid frame record.
unsafe { frame_pointer.plus(8).store::<Address>(value) }
}
// Returns the fixed frame-pointer-relative offset (in bytes, negative)
// of the save slot for `reg`. Reg should be a 64-bit callee-saved GPR
// or FPR, and must not be RBP.
pub fn get_callee_saved_offset(reg: MuID) -> isize {
debug_assert!(is_callee_saved(reg) && reg != RBP.id());

// RBX occupies the first slot; the remaining registers are numbered
// from R12 onwards (consecutive callee-saved register ids appear to
// be spaced 4 apart — TODO confirm against the register definitions).
let slot = match reg == RBX.id() {
true => 0,
false => (reg - R12.id()) / 4 + 1,
};
// Slots are laid out downwards from the frame pointer, 8 bytes each,
// starting at offset -8.
-8 * (slot as isize + 1)
}
pub fn is_callee_saved(reg_id: MuID) -> bool {
for reg in CALLEE_SAVED_GPRs.iter() {
if reg_id == reg.extract_ssa_id().unwrap() {
......
......@@ -47,6 +47,20 @@ pub use compiler::backend::x86_64::pick_group_for_reg;
#[cfg(target_arch = "x86_64")]
pub use compiler::backend::x86_64::is_callee_saved;
#[cfg(target_arch = "x86_64")]
pub use compiler::backend::x86_64::CALLEE_SAVED_COUNT ;
#[cfg(target_arch = "x86_64")]
pub use compiler::backend::x86_64::get_callee_saved_offset;
#[cfg(target_arch = "x86_64")]
pub use compiler::backend::x86_64::get_previous_frame_pointer;
#[cfg(target_arch = "x86_64")]
pub use compiler::backend::x86_64::get_return_address;
#[cfg(target_arch = "x86_64")]
pub use compiler::backend::x86_64::set_previous_frame_pointer;
#[cfg(target_arch = "x86_64")]
pub use compiler::backend::x86_64::set_return_address;
#[cfg(target_arch = "x86_64")]
pub use compiler::backend::x86_64::get_previous_stack_pointer;
#[cfg(target_arch = "x86_64")]
pub use compiler::backend::x86_64::emit_code;
#[cfg(target_arch = "x86_64")]
pub use compiler::backend::x86_64::emit_context;
......@@ -82,6 +96,20 @@ pub use compiler::backend::aarch64::pick_group_for_reg;
#[cfg(target_arch = "aarch64")]
pub use compiler::backend::aarch64::is_callee_saved;
#[cfg(target_arch = "aarch64")]
pub use compiler::backend::aarch64::CALLEE_SAVED_COUNT ;
#[cfg(target_arch = "aarch64")]
pub use compiler::backend::aarch64::get_callee_saved_offset;
#[cfg(target_arch = "aarch64")]
pub use compiler::backend::aarch64::get_previous_frame_pointer;
#[cfg(target_arch = "aarch64")]
pub use compiler::backend::aarch64::get_return_address;
#[cfg(target_arch = "aarch64")]
pub use compiler::backend::aarch64::get_previous_stack_pointer;
#[cfg(target_arch = "aarch64")]
pub use compiler::backend::aarch64::set_previous_frame_pointer;
#[cfg(target_arch = "aarch64")]
pub use compiler::backend::aarch64::set_return_address;
#[cfg(target_arch = "aarch64")]
pub use compiler::backend::aarch64::emit_code;
#[cfg(target_arch = "aarch64")]
pub use compiler::backend::aarch64::emit_context;
......
use ast::ir::*;
use ast::ptr::*;
use ast::types::*;
use runtime::ValueLocation;
use compiler::backend::get_callee_saved_offset;
use std::fmt;
use std::collections::HashMap;
......@@ -25,8 +25,9 @@ pub struct Frame {
pub argument_by_stack: HashMap<MuID, P<Value>>,
pub allocated: HashMap<MuID, FrameSlot>,
// Mapping from callee-saved id (i.e. the position in the list of callee-saved registers) to the offset from the frame pointer
pub callee_saved: HashMap<isize, isize>,
// (callsite, destination address)
exception_callsites: Vec<(ValueLocation, ValueLocation)>
}
impl fmt::Display for Frame {
......@@ -37,9 +38,6 @@ impl fmt::Display for Frame {
writeln!(f, " {}", slot).unwrap();
}
writeln!(f, " exception callsites:").unwrap();
for &(ref callsite, ref dest) in self.exception_callsites.iter() {
writeln!(f, " callsite: {} -> {}", callsite, dest).unwrap()
}
writeln!(f, " cur offset: {}", self.cur_offset).unwrap();
writeln!(f, "}}")
}
......@@ -52,9 +50,8 @@ impl Frame {
cur_offset: 0,
argument_by_reg: HashMap::new(),
argument_by_stack: HashMap::new(),
callee_saved: HashMap::new(),
allocated: HashMap::new(),
exception_callsites: vec![]
}
}
......@@ -80,13 +77,21 @@ impl Frame {
}
pub fn alloc_slot_for_callee_saved_reg(&mut self, reg: P<Value>, vm: &VM) -> P<Value> {
let slot = self.alloc_slot(&reg, vm);
slot.make_memory_op(reg.ty.clone(), vm)
let (mem, off) = {
let slot = self.alloc_slot(&reg, vm);
(slot.make_memory_op(reg.ty.clone(), vm), slot.offset)
};
let o = get_callee_saved_offset(reg.id());
trace!("ISAAC: callee saved {} is at {}", reg, o);
self.callee_saved.insert(o, off);
mem
}
pub fn remove_record_for_callee_saved_reg(&mut self, reg: MuID) {
pub fn remove_record_for_callee_saved_reg(&mut self, reg: MuID)
{
self.allocated.remove(&reg);
let id = get_callee_saved_offset(reg);
self.callee_saved.remove(&id);
}
pub fn alloc_slot_for_spilling(&mut self, reg: P<Value>, vm: &VM) -> P<Value> {
......@@ -94,15 +99,6 @@ impl Frame {
slot.make_memory_op(reg.ty.clone(), vm)
}
pub fn get_exception_callsites(&self) -> &Vec<(ValueLocation, ValueLocation)> {
&self.exception_callsites
}
pub fn add_exception_callsite(&mut self, callsite: ValueLocation, dest: ValueLocation) {
trace!("add exception callsite: {} to dest {}", callsite, dest);
self.exception_callsites.push((callsite, dest));
}
#[cfg(any(target_arch = "x86_64", target_arch = "aarch64"))]
pub fn alloc_slot(&mut self, val: &P<Value>, vm: &VM) -> &FrameSlot {
// RBP/FP is 16 bytes aligned, we are offsetting from RBP/FP
......@@ -111,7 +107,11 @@ impl Frame {
let backendty = vm.get_backend_type_info(val.ty.id());
if backendty.alignment > 16 {
unimplemented!()
if cfg!(target_arch="aarch64") {
panic!("A type cannot have alignment greater than 16 on aarch64")
} else {
unimplemented!()
}
}
self.cur_offset -= backendty.size as isize;
......
......@@ -39,29 +39,29 @@ pop_pair FP, LR
.endm
.macro push_callee_saved stack=SP
push_pair D8, D9, \stack
push_pair D10, D11, \stack
push_pair D12, D13, \stack
push_pair D14, D15, \stack
push_pair X19, X20, \stack
push_pair X21, X22, \stack
push_pair X23, X24, \stack
push_pair X25, X26, \stack
push_pair X27, X28, \stack
push_pair D8, D9, \stack
push_pair D10, D11, \stack
push_pair D12, D13, \stack
push_pair D14, D15, \stack
.endm
.macro pop_callee_saved stack=SP
pop_pair D15, D14, \stack
pop_pair D13, D12, \stack
pop_pair D11, D10, \stack
pop_pair D9, D8, \stack
pop_pair X28, X27, \stack
pop_pair X26, X25, \stack
pop_pair X24, X23, \stack
pop_pair X22, X21, \stack
pop_pair X20, X19, \stack
pop_pair D15, D14, \stack
pop_pair D13, D12, \stack
pop_pair D11, D10, \stack
pop_pair D9, D8, \stack
.endm
.macro pop_arguments stack=SP
......
This diff is collapsed.
This diff is collapsed.
......@@ -18,26 +18,59 @@ pub mod thread;
pub mod math;
pub mod entrypoints;
const PRINT_BACKTRACE : bool = false;
#[cfg(target_arch = "x86_64")]
#[path = "exception_x64.rs"]
pub mod exception;
#[cfg(target_arch = "aarch64")]
#[path = "exception_aarch64.rs"]
pub mod exception;
// Consider using the libloading crate instead of the raw C functions for dynamic libraries;
// however, I am not sure if libloading can load symbols from the current process (as opposed
// to an actual dylib), so dlopen/dlsym from C are used here.
// Mirror of the C `Dl_info` structure filled in by dladdr(3).
#[repr(C)]
struct Dl_info {
// Pathname of the shared object containing the address
dli_fname: *const c_char,
// Base address at which that shared object is loaded
dli_fbase: *mut c_void,
// Name of the nearest symbol whose address is <= the queried address
// (null if no symbol matched)
dli_sname: *const c_char,
// Exact address of that symbol (null if no symbol matched)
dli_saddr: *mut c_void,
}
// Raw bindings to the dynamic-linking API from libdl (see dlopen(3),
// dlsym(3), dladdr(3), dlerror(3)).
#[link(name="dl")]
extern "C" {
// NOTE(review): C declares `flags` as `int`; `isize` is 64-bit on LP64
// targets — works in practice for register-passed args, but verify.
fn dlopen(filename: *const c_char, flags: isize) -> *const c_void;
fn dlsym(handle: *const c_void, symbol: *const c_char) -> *const c_void;
// Returns nonzero on success, 0 if the address matched no shared object
fn dladdr(addr: *mut c_void, info: *mut Dl_info) -> i32;
// Returns the last dl* error message, or null if none is pending
fn dlerror() -> *const c_char;
}
// Resolves `function_addr` to the name and start address of the nearest
// preceding symbol, using dladdr(3).
//
// Returns `(symbol_name, symbol_address)`; the name is "UNKNOWN" when the
// address lies inside a mapped object but has no named symbol.
//
// Panics if the address cannot be matched to any loaded shared object.
//
// TODO: this actually returns the name and address of the nearest symbol (of any type)
// that starts before function_addr (instead we want the nearest function symbol)
pub fn get_function_info(function_addr: Address) -> (CName, Address) {
    use std::ptr;

    // Rust requires this to be initialised
    let mut info = Dl_info {
        dli_fname: ptr::null::<c_char>(),
        dli_fbase: ptr::null_mut::<c_void>(),
        dli_sname: ptr::null::<c_char>(),
        dli_saddr: ptr::null_mut::<c_void>(),
    };

    // dladdr(3) returns 0 when the address matches no shared object and,
    // per its man page, does NOT set an error retrievable via dlerror().
    // Checking dlerror() here (as before) could also pick up a stale error
    // from an earlier dlopen/dlsym call, so check the return value instead.
    let matched = unsafe { dladdr(function_addr.to_ptr_mut::<c_void>(), &mut info) };
    if matched == 0 {
        error!("cannot find function address: {}", function_addr);
        panic!("failed to resolve function address");
    }

    if !info.dli_sname.is_null() {
        (unsafe {CStr::from_ptr(info.dli_sname)}.to_str().unwrap().to_string(), Address::from_ptr(info.dli_saddr))
    } else {
        // address is inside a mapped object but has no named symbol
        ("UNKNOWN".to_string(), Address::from_ptr(info.dli_saddr))
    }
}
pub fn resolve_symbol(symbol: String) -> Address {
use std::ptr;
......
......@@ -29,6 +29,7 @@ begin_func swap_to_mu_stack
ADR LR, entry_returned
# branch to entry
//MOV FP, 0 // End of call frame
RET X9
end_func swap_to_mu_stack
......@@ -80,28 +81,25 @@ end_func get_current_frame_rbp
# muentry_throw_exception(obj: Address)
# X0
begin_func muentry_throw_exception
# save all callee-saved
# save all callee-saved registers and pass them as argument 2
push_pair LR, FP
MOV FP, SP
push_callee_saved
# SP points to the callee saved registers, pass this as 2nd argument
MOV X1, SP
MOV X1, FP // X1 is the frame pointer
#if defined (__linux__)
B throw_exception_internal
BL throw_exception_internal
#else
#error "Only Linux is supported. "
#endif
# won't return
# _exception_restore(dest: Address, callee_saved: *const Word, rsp: Address) -> !
# _exception_restore(dest: Address, frame_cursor: *const Word, sp: Address) -> !
# X0 X1 X2
# callee_saved: [x28, x27, x26, x25, x24, x23, x22, x21, x20, x19, d15, d14, d13, d12, d11, d10, d9, d8, fp, lr]
begin_func exception_restore
SUB X1, X1, #144 // Skip to the bottom of the frame cursor
// load each callee saved register relative to the stack pointer
pop_callee_saved X1
pop_pair FP, LR, X1 // TODO: Should we be restoring the link register?
pop_pair FP, LR, X1
MOV SP, X2
BR X0
end_func exception_restore
......
......@@ -74,15 +74,16 @@ end_func get_current_frame_rbp
# %rdi
begin_func muentry_throw_exception
# save all callee-saved
pushq %r15
pushq %r14
pushq %r13
pushq %r12
pushq %rbp
movq %rsp, %rbp
pushq %rbx
pushq %r12
pushq %r13
pushq %r14
pushq %r15
# %rsp points to %rbx, pass this as 2nd argument
movq %rsp, %rsi
movq %rbp, %rsi
#if defined (__linux__)
jmp CNAME(throw_exception_internal@PLT)
......@@ -97,12 +98,12 @@ begin_func muentry_throw_exception
# %rdi %rsi %rdx
# callee_saved: [rbx, rbp, r12, r13, r14, r15]
begin_func exception_restore
movq 0(%rsi), %rbx
movq 8(%rsi), %rbp
movq 16(%rsi),%r12
movq 24(%rsi),%r13
movq 32(%rsi),%r14
movq 40(%rsi),%r15
movq 0(%rsi), %rbp
movq -8(%rsi),%rbx
movq -16(%rsi),%r12
movq -24(%rsi),%r13
movq -32(%rsi),%r14
movq -40(%rsi),%r15
movq %rdx, %rsp
jmpq *%rdi
......
......@@ -9,8 +9,10 @@ use compiler::{Compiler, CompilerPolicy};
use compiler::backend;
use compiler::backend::BackendTypeInfo;
use compiler::machine_code::CompiledFunction;
</