Commit 0ced4d8a authored by Isaac Oscar Gariano's avatar Isaac Oscar Gariano

Implemented very fast exception handling (compared to before...)!

parent 2230463c
......@@ -26,6 +26,8 @@ use compiler::machine_code::CompiledFunction;
use compiler::frame::Frame;
use std::collections::HashMap;
use std::collections::LinkedList;
use std::any::Any;
const INLINE_FASTPATH : bool = false;
......@@ -40,10 +42,14 @@ pub struct InstructionSelection {
current_block: Option<MuName>,
current_block_in_ir: Option<MuName>,
current_func_start: Option<ValueLocation>,
// key: block id, val: callsite that names the block as exception block
current_exn_callsites: HashMap<MuID, Vec<ValueLocation>>,
// A list of all callsites, with the corresponding exception block (if there is one)
// Technically this is a map in that each key is unique, but we never add
// duplicate keys or look things up, so a list of pairs is faster than a map.
current_callsites: LinkedList<(MuName, MuID)>,
// key: block id, val: block location
current_exn_blocks: HashMap<MuID, ValueLocation>,
current_exn_blocks: HashMap<MuID, MuName>,
current_xr_value: Option<P<Value>>, // A temporary that holds to saved XR value (if needed)
current_constants: HashMap<MuID, P<Value>>,
current_constants_locs: HashMap<MuID, P<Value>>
......@@ -68,8 +74,7 @@ impl <'a> InstructionSelection {
// FIXME: ideally we should not create new blocks in instruction selection
// see Issue #6
current_func_start: None,
// key: block id, val: callsite that names the block as exception block
current_exn_callsites: HashMap::new(),
current_callsites: LinkedList::new(),
current_exn_blocks: HashMap::new(),
current_xr_value: None,
current_constants: HashMap::new(),
......@@ -3042,7 +3047,11 @@ impl <'a> InstructionSelection {
unimplemented!()
} else {
let callsite = self.new_callsite_label(cur_node);
self.backend.emit_bl(callsite, func_name, None); // assume ccall wont throw exception
self.backend.emit_bl(callsite.clone(), func_name, None); // assume ccall wont throw exception
// TODO: What if there's an exception block?
self.current_callsites.push_back((callsite, 0));
// record exception block (CCall may have an exception block)
if cur_node.is_some() {
......@@ -3209,14 +3218,9 @@ impl <'a> InstructionSelection {
let ref exn_dest = resumption.as_ref().unwrap().exn_dest;
let target_block = exn_dest.target;
if self.current_exn_callsites.contains_key(&target_block) {
let callsites = self.current_exn_callsites.get_mut(&target_block).unwrap();
callsites.push(callsite);
} else {
let mut callsites = vec![];
callsites.push(callsite);
self.current_exn_callsites.insert(target_block, callsites);
}
self.current_callsites.push_back((callsite.to_relocatable(), target_block));
} else {
self.current_callsites.push_back((callsite.to_relocatable(), 0));
}
// deal with ret vals
......@@ -3284,22 +3288,21 @@ impl <'a> InstructionSelection {
}
// push all callee-saved registers
for i in 0..CALLEE_SAVED_FPRs.len() {
let ref reg = CALLEE_SAVED_FPRs[i];
for i in 0..CALLEE_SAVED_GPRs.len() {
let ref reg = CALLEE_SAVED_GPRs[i];
trace!("allocate frame slot for regs {}", reg);
trace!("allocate frame slot for reg {}", reg);
let loc = self.current_frame.as_mut().unwrap().alloc_slot_for_callee_saved_reg(reg.clone(), vm);
let loc = emit_mem(self.backend.as_mut(), &loc, get_type_alignment(&reg.ty, vm), f_context, vm);
self.backend.emit_str_callee_saved(&loc, &reg);
}
for i in 0..CALLEE_SAVED_GPRs.len() {
let ref reg = CALLEE_SAVED_GPRs[i];
trace!("allocate frame slot for regs {}", reg);
for i in 0..CALLEE_SAVED_FPRs.len() {
let ref reg = CALLEE_SAVED_FPRs[i];
trace!("allocate frame slot for reg {}", reg);
let loc = self.current_frame.as_mut().unwrap().alloc_slot_for_callee_saved_reg(reg.clone(), vm);
let loc = emit_mem(self.backend.as_mut(), &loc, get_type_alignment(&reg.ty, vm), f_context, vm);
self.backend.emit_str_callee_saved(&loc, &reg);
}
// unload arguments
......@@ -3370,16 +3373,16 @@ impl <'a> InstructionSelection {
self.start_block(EPILOGUE_BLOCK_NAME.to_string(), &livein);
// pop all callee-saved registers
for i in (0..CALLEE_SAVED_GPRs.len()).rev() {
let ref reg = CALLEE_SAVED_GPRs[i];
for i in (0..CALLEE_SAVED_FPRs.len()).rev() {
let ref reg = CALLEE_SAVED_FPRs[i];
let reg_id = reg.extract_ssa_id().unwrap();
let loc = self.current_frame.as_mut().unwrap().allocated.get(&reg_id).unwrap().make_memory_op(reg.ty.clone(), vm);
let loc = emit_mem(self.backend.as_mut(), &loc, get_type_alignment(&reg.ty, vm), f_context, vm);
self.backend.emit_ldr_callee_saved(reg, &loc);
}
for i in (0..CALLEE_SAVED_FPRs.len()).rev() {
let ref reg = CALLEE_SAVED_FPRs[i];
for i in (0..CALLEE_SAVED_GPRs.len()).rev() {
let ref reg = CALLEE_SAVED_GPRs[i];
let reg_id = reg.extract_ssa_id().unwrap();
let loc = self.current_frame.as_mut().unwrap().allocated.get(&reg_id).unwrap().make_memory_op(reg.ty.clone(), vm);
let loc = emit_mem(self.backend.as_mut(), &loc, get_type_alignment(&reg.ty, vm), f_context, vm);
......@@ -4093,7 +4096,7 @@ impl CompilerPass for InstructionSelection {
start_loc
});
self.current_callsite_id = 0;
self.current_exn_callsites.clear();
self.current_callsites.clear();
self.current_exn_blocks.clear();
self.current_constants.clear();
......@@ -4124,7 +4127,7 @@ impl CompilerPass for InstructionSelection {
// we need to be aware of exception blocks so that we can emit information to catch exceptions
let loc = self.backend.start_exception_block(block_label.clone());
self.current_exn_blocks.insert(block.id(), loc);
self.current_exn_blocks.insert(block.id(), loc.to_relocatable());
} else {
// normal block
self.backend.start_block(block_label.clone());
......@@ -4160,7 +4163,7 @@ impl CompilerPass for InstructionSelection {
}
}
fn finish_function(&mut self, vm: &VM, func: &mut MuFunctionVersion) {
fn finish_function(&mut self, vm: &VM, func: &mut MuFunctionVersion) {
self.emit_common_epilogue(&func.sig.ret_tys, &mut func.context, vm);
self.backend.print_cur_code();
......@@ -4178,23 +4181,19 @@ impl CompilerPass for InstructionSelection {
let (mc, func_end) = self.backend.finish_code(func_name.clone());
// insert exception branch info
let mut frame = match self.current_frame.take() {
let frame = match self.current_frame.take() {
Some(frame) => frame,
None => panic!("no current_frame for function {} that is being compiled", func_name)
};
for block_id in self.current_exn_blocks.keys() {
let block_loc = match self.current_exn_blocks.get(&block_id) {
Some(loc) => loc,
None => panic!("failed to find exception block {}", block_id)
};
let callsites = match self.current_exn_callsites.get(&block_id) {
Some(callsite) => callsite,
None => panic!("failed to find callsite for block {}", block_id)
for &(ref callsite, block_id) in self.current_callsites.iter() {
let block_loc = if block_id == 0 {
String::new()
} else {
self.current_exn_blocks.get(&block_id).unwrap().clone()
};
for callsite in callsites {
frame.add_exception_callsite(callsite.clone(), block_loc.clone());
}
vm.add_exception_callsite(callsite.clone(), block_loc, self.current_fv_id);
}
let compiled_func = CompiledFunction::new(func.func_id, func.id(), mc,
......
This source diff could not be displayed because it is too large. You can view the blob instead.
......@@ -28,6 +28,8 @@ use utils::math;
use utils::POINTER_SIZE;
use std::collections::HashMap;
use std::collections::LinkedList;
use std::any::Any;
lazy_static! {
......@@ -108,10 +110,12 @@ pub struct InstructionSelection {
current_block: Option<MuName>,
current_block_in_ir: Option<MuName>,
current_func_start: Option<ValueLocation>,
// key: block id, val: callsite that names the block as exception block
current_exn_callsites: HashMap<MuID, Vec<ValueLocation>>,
// A list of (callsite name, exception block id) pairs; the block id is 0 when
// the callsite has no exception destination.
// Technically this is a map in that each key is unique, but we never add
// duplicate keys or look things up, so a list of pairs is faster than a map.
current_callsites: LinkedList<(MuName, MuID)>,
// key: block id, val: block location
current_exn_blocks: HashMap<MuID, ValueLocation>,
current_exn_blocks: HashMap<MuID, MuName>,
current_constants: HashMap<MuID, P<Value>>,
current_constants_locs: HashMap<MuID, P<Value>>
......@@ -135,8 +139,7 @@ impl <'a> InstructionSelection {
// FIXME: ideally we should not create new blocks in instruction selection
// see Issue #6
current_func_start: None,
// key: block id, val: callsite that names the block as exception block
current_exn_callsites: HashMap::new(),
current_callsites: LinkedList::new(),
current_exn_blocks: HashMap::new(),
current_constants: HashMap::new(),
......@@ -3166,8 +3169,11 @@ impl <'a> InstructionSelection {
unimplemented!()
} else {
let callsite = self.new_callsite_label(cur_node);
self.backend.emit_call_near_rel32(callsite, func_name, None); // assume ccall wont throw exception
self.backend.emit_call_near_rel32(callsite.clone(), func_name, None); // assume ccall wont throw exception
// TODO: What if there's an exception block?
self.current_callsites.push_back((callsite, 0));
// record exception block (CCall may have an exception block)
if cur_node.is_some() {
let cur_node = cur_node.unwrap();
......@@ -3347,21 +3353,16 @@ impl <'a> InstructionSelection {
// record exception branch
let ref exn_dest = resumption.as_ref().unwrap().exn_dest;
let target_block = exn_dest.target;
if self.current_exn_callsites.contains_key(&target_block) {
let callsites = self.current_exn_callsites.get_mut(&target_block).unwrap();
callsites.push(callsite);
} else {
let mut callsites = vec![];
callsites.push(callsite);
self.current_exn_callsites.insert(target_block, callsites);
}
self.current_callsites.push_back((callsite.to_relocatable(), target_block));
// insert an intermediate block to branch to normal
// the branch is inserted later (because we need to deal with postcall convention)
self.finish_block();
let fv_id = self.current_fv_id;
self.start_block(format!("normal_cont_for_call_{}_{}", fv_id, cur_node.id()));
} else {
self.current_callsites.push_back((callsite.to_relocatable(), 0));
}
// deal with ret vals, collapse stack etc.
......@@ -4811,7 +4812,7 @@ impl CompilerPass for InstructionSelection {
start_loc
});
self.current_callsite_id = 0;
self.current_exn_callsites.clear();
self.current_callsites.clear();
self.current_exn_blocks.clear();
self.current_constants.clear();
......@@ -4843,7 +4844,7 @@ impl CompilerPass for InstructionSelection {
// we need to be aware of exception blocks so that we can emit information to catch exceptions
let loc = self.backend.start_exception_block(block_label.clone());
self.current_exn_blocks.insert(block.id(), loc);
self.current_exn_blocks.insert(block.id(), loc.to_relocatable());
} else {
// normal block
self.backend.start_block(block_label.clone());
......@@ -4904,25 +4905,21 @@ impl CompilerPass for InstructionSelection {
let (mc, func_end) = self.backend.finish_code(func_name.clone());
// insert exception branch info
let mut frame = match self.current_frame.take() {
let frame = match self.current_frame.take() {
Some(frame) => frame,
None => panic!("no current_frame for function {} that is being compiled", func_name)
};
for block_id in self.current_exn_blocks.keys() {
let block_loc = match self.current_exn_blocks.get(&block_id) {
Some(loc) => loc,
None => panic!("failed to find exception block {}", block_id)
};
let callsites = match self.current_exn_callsites.get(&block_id) {
Some(callsite) => callsite,
None => panic!("failed to find callsite for block {}", block_id)
for &(ref callsite, block_id) in self.current_callsites.iter() {
let block_loc = if block_id == 0 {
String::new()
} else {
self.current_exn_blocks.get(&block_id).unwrap().clone()
};
for callsite in callsites {
frame.add_exception_callsite(callsite.clone(), block_loc.clone());
}
vm.add_exception_callsite(callsite.clone(), block_loc, self.current_fv_id);
}
let compiled_func = CompiledFunction::new(func.func_id, func.id(), mc,
self.current_constants.clone(), self.current_constants_locs.clone(),
frame, self.current_func_start.take().unwrap(), func_end);
......
......@@ -13,6 +13,7 @@ pub use compiler::backend::x86_64::asm_backend::emit_context;
pub use compiler::backend::x86_64::asm_backend::emit_context_with_reloc;
#[cfg(feature = "aot")]
pub use compiler::backend::x86_64::asm_backend::spill_rewrite;
use utils::Address;
use ast::ptr::P;
use ast::ir::*;
......@@ -22,6 +23,9 @@ use compiler::backend::RegGroup;
use utils::LinkedHashMap;
use std::collections::HashMap;
// Number of normal callee saved registers (excluding RSP and RBP)
pub const CALLEE_SAVED_COUNT : usize = 5;
macro_rules! GPR_ALIAS {
($alias: ident: ($id64: expr, $r64: ident) -> $r32: ident, $r16: ident, $r8l: ident, $r8h: ident) => {
lazy_static!{
......@@ -448,6 +452,47 @@ pub fn pick_group_for_reg(reg_id: MuID) -> RegGroup {
RegGroup::get_from_value(reg)
}
// Gets the previous frame pointer with respect to the current one.
// The caller's saved FP lives at offset 0 from the current frame pointer.
#[inline(always)]
pub fn get_previous_frame_pointer(frame_pointer: Address) -> Address {
    // SAFETY: assumes frame_pointer points at a valid saved-FP slot — TODO confirm at call sites
    unsafe { frame_pointer.load::<Address>() }
}
// Gets the return address associated with the given frame pointer.
// The return address is stored one word above the saved FP slot.
#[inline(always)]
pub fn get_return_address(frame_pointer: Address) -> Address {
    let return_addr_slot = frame_pointer.plus(8);
    // SAFETY: assumes the slot above the saved FP holds a valid return address — TODO confirm
    unsafe { return_addr_slot.load::<Address>() }
}
// Gets the stack pointer value from before the current frame was created.
// The old SP sits just above the saved FP / return-address pair.
#[inline(always)]
pub fn get_previous_stack_pointer(frame_pointer: Address) -> Address {
    let saved_pair_size = 16; // saved frame pointer + return address
    frame_pointer.plus(saved_pair_size)
}
// Overwrites the saved previous-frame-pointer slot of the current frame.
#[inline(always)]
pub fn set_previous_frame_pointer(frame_pointer: Address, value: Address) {
    // SAFETY: assumes frame_pointer points at a writable saved-FP slot — TODO confirm at call sites
    unsafe { frame_pointer.store::<Address>(value) }
}
// Sets the return address for the current frame pointer
// (the slot one word above the saved FP).
#[inline(always)]
pub fn set_return_address(frame_pointer: Address, value: Address) {
    // SAFETY: assumes frame_pointer + 8 is a writable saved-return-address slot — TODO confirm
    unsafe { frame_pointer.plus(8).store::<Address>(value) }
}
// Reg should be a 64-bit callee saved GPR or FPR (never RBP).
// Returns the negative offset from the frame pointer at which this
// register's save slot lives: RBX occupies slot 0, R12..R15 follow.
pub fn get_callee_saved_offset(reg: MuID) -> isize {
    debug_assert!(is_callee_saved(reg) && reg != RBP.id());
    // NOTE(review): assumes ids of the R12..R15 width-aliases are spaced 4
    // apart (per the GPR_ALIAS! layout) — verify if the alias macro changes.
    let slot = if reg == RBX.id() {
        0
    } else {
        (reg - R12.id()) / 4 + 1
    };
    // Slot k is stored at -(k + 1) * 8 relative to the frame pointer.
    -8 * (slot as isize + 1)
}
pub fn is_callee_saved(reg_id: MuID) -> bool {
for reg in CALLEE_SAVED_GPRs.iter() {
if reg_id == reg.extract_ssa_id().unwrap() {
......
......@@ -47,6 +47,20 @@ pub use compiler::backend::x86_64::pick_group_for_reg;
#[cfg(target_arch = "x86_64")]
pub use compiler::backend::x86_64::is_callee_saved;
#[cfg(target_arch = "x86_64")]
pub use compiler::backend::x86_64::CALLEE_SAVED_COUNT ;
#[cfg(target_arch = "x86_64")]
pub use compiler::backend::x86_64::get_callee_saved_offset;
#[cfg(target_arch = "x86_64")]
pub use compiler::backend::x86_64::get_previous_frame_pointer;
#[cfg(target_arch = "x86_64")]
pub use compiler::backend::x86_64::get_return_address;
#[cfg(target_arch = "x86_64")]
pub use compiler::backend::x86_64::set_previous_frame_pointer;
#[cfg(target_arch = "x86_64")]
pub use compiler::backend::x86_64::set_return_address;
#[cfg(target_arch = "x86_64")]
pub use compiler::backend::x86_64::get_previous_stack_pointer;
#[cfg(target_arch = "x86_64")]
pub use compiler::backend::x86_64::emit_code;
#[cfg(target_arch = "x86_64")]
pub use compiler::backend::x86_64::emit_context;
......@@ -82,6 +96,20 @@ pub use compiler::backend::aarch64::pick_group_for_reg;
#[cfg(target_arch = "aarch64")]
pub use compiler::backend::aarch64::is_callee_saved;
#[cfg(target_arch = "aarch64")]
pub use compiler::backend::aarch64::CALLEE_SAVED_COUNT ;
#[cfg(target_arch = "aarch64")]
pub use compiler::backend::aarch64::get_callee_saved_offset;
#[cfg(target_arch = "aarch64")]
pub use compiler::backend::aarch64::get_previous_frame_pointer;
#[cfg(target_arch = "aarch64")]
pub use compiler::backend::aarch64::get_return_address;
#[cfg(target_arch = "aarch64")]
pub use compiler::backend::aarch64::get_previous_stack_pointer;
#[cfg(target_arch = "aarch64")]
pub use compiler::backend::aarch64::set_previous_frame_pointer;
#[cfg(target_arch = "aarch64")]
pub use compiler::backend::aarch64::set_return_address;
#[cfg(target_arch = "aarch64")]
pub use compiler::backend::aarch64::emit_code;
#[cfg(target_arch = "aarch64")]
pub use compiler::backend::aarch64::emit_context;
......
use ast::ir::*;
use ast::ptr::*;
use ast::types::*;
use runtime::ValueLocation;
use compiler::backend::get_callee_saved_offset;
use std::fmt;
use std::collections::HashMap;
......@@ -25,8 +25,9 @@ pub struct Frame {
pub argument_by_stack: HashMap<MuID, P<Value>>,
pub allocated: HashMap<MuID, FrameSlot>,
// Mapping from callee-saved register id (i.e. the position in the list of callee-saved registers) to its offset from the frame pointer
pub callee_saved: HashMap<isize, isize>,
// (callsite, destination address)
exception_callsites: Vec<(ValueLocation, ValueLocation)>
}
impl fmt::Display for Frame {
......@@ -37,9 +38,6 @@ impl fmt::Display for Frame {
writeln!(f, " {}", slot).unwrap();
}
writeln!(f, " exception callsites:").unwrap();
for &(ref callsite, ref dest) in self.exception_callsites.iter() {
writeln!(f, " callsite: {} -> {}", callsite, dest).unwrap()
}
writeln!(f, " cur offset: {}", self.cur_offset).unwrap();
writeln!(f, "}}")
}
......@@ -52,9 +50,8 @@ impl Frame {
cur_offset: 0,
argument_by_reg: HashMap::new(),
argument_by_stack: HashMap::new(),
callee_saved: HashMap::new(),
allocated: HashMap::new(),
exception_callsites: vec![]
}
}
......@@ -80,13 +77,21 @@ impl Frame {
}
// Allocates a frame slot for a callee-saved register and records where it
// lives so the exception unwinder can later locate and restore the value.
pub fn alloc_slot_for_callee_saved_reg(&mut self, reg: P<Value>, vm: &VM) -> P<Value> {
    let (mem, off) = {
        let slot = self.alloc_slot(&reg, vm);
        (slot.make_memory_op(reg.ty.clone(), vm), slot.offset)
    };
    // Key by the register's canonical callee-saved offset; the value is the
    // actual offset of the allocated slot from the frame pointer.
    let o = get_callee_saved_offset(reg.id());
    // removed leftover personal debug tag ("ISAAC:") from the trace message
    trace!("callee saved {} is at frame offset {}", reg, o);
    self.callee_saved.insert(o, off);
    mem
}
pub fn remove_record_for_callee_saved_reg(&mut self, reg: MuID) {
pub fn remove_record_for_callee_saved_reg(&mut self, reg: MuID)
{
self.allocated.remove(&reg);
let id = get_callee_saved_offset(reg);
self.callee_saved.remove(&id);
}
pub fn alloc_slot_for_spilling(&mut self, reg: P<Value>, vm: &VM) -> P<Value> {
......@@ -94,15 +99,6 @@ impl Frame {
slot.make_memory_op(reg.ty.clone(), vm)
}
pub fn get_exception_callsites(&self) -> &Vec<(ValueLocation, ValueLocation)> {
&self.exception_callsites
}
pub fn add_exception_callsite(&mut self, callsite: ValueLocation, dest: ValueLocation) {
trace!("add exception callsite: {} to dest {}", callsite, dest);
self.exception_callsites.push((callsite, dest));
}
#[cfg(any(target_arch = "x86_64", target_arch = "aarch64"))]
pub fn alloc_slot(&mut self, val: &P<Value>, vm: &VM) -> &FrameSlot {
// RBP/FP is 16 bytes aligned, we are offsetting from RBP/FP
......@@ -111,7 +107,11 @@ impl Frame {
let backendty = vm.get_backend_type_info(val.ty.id());
if backendty.alignment > 16 {
unimplemented!()
if cfg!(target_arch="aarch64") {
panic!("A type cannot have alignment greater than 16 on aarch64")
} else {
unimplemented!()
}
}
self.cur_offset -= backendty.size as isize;
......
......@@ -39,29 +39,29 @@ pop_pair FP, LR
.endm
.macro push_callee_saved stack=SP
push_pair D8, D9, \stack
push_pair D10, D11, \stack
push_pair D12, D13, \stack
push_pair D14, D15, \stack
push_pair X19, X20, \stack
push_pair X21, X22, \stack
push_pair X23, X24, \stack
push_pair X25, X26, \stack
push_pair X27, X28, \stack
push_pair D8, D9, \stack
push_pair D10, D11, \stack
push_pair D12, D13, \stack
push_pair D14, D15, \stack
.endm
.macro pop_callee_saved stack=SP
pop_pair D15, D14, \stack
pop_pair D13, D12, \stack
pop_pair D11, D10, \stack
pop_pair D9, D8, \stack
pop_pair X28, X27, \stack
pop_pair X26, X25, \stack
pop_pair X24, X23, \stack
pop_pair X22, X21, \stack
pop_pair X20, X19, \stack
pop_pair D15, D14, \stack
pop_pair D13, D12, \stack
pop_pair D11, D10, \stack
pop_pair D9, D8, \stack
.endm
.macro pop_arguments stack=SP
......
use ast::ir::*;
use compiler::machine_code::CompiledFunction;
use compiler::backend::aarch64;
use utils::Address;
use utils::Word;
use utils::POINTER_SIZE;
use runtime::thread;
use std::sync::RwLock;
use std::sync::RwLockReadGuard;
use std::collections::HashMap;
use std::fmt;
// muentry_throw_exception in swap_stack_aarch64_sysv.S
// is like a special calling convention to throw_exception_internal
// in order to save all the callee saved registers at a known location
// normal calling convention:
// ---code--- ---stack---
// push caller saved caller saved
// call
// -> (in callee) push LR, FP LR, old FP
// MOV SP -> FP callee saved
// push callee saved
// this function's calling convention
// ---code--- ---stack---
// push caller saved caller saved
// call LR, old FP
// -> (in asm) push callee saved all callee saved <- 2nd arg
// (in rust) push LR, FP (by rust) LR, old FP
// mov SP -> FP (by rust) callee saved
// push callee saved
// we do not want to make any assumption on where Rust saves the frame pointer
// or the callee-saved registers,
// so we save them by ourselves in assembly, and pass a pointer as 2nd argument
#[no_mangle]
#[allow(unreachable_code)]
// last_frame_callee_saved: a pointer passed from assembly; the callee-saved
// registers are laid out as X28..X19 then D15..D8 (from low address to high
// address), with the saved FP and the return address (LR) stored after them
pub extern fn throw_exception_internal(exception_obj: Address, last_frame_callee_saved: Address) -> ! {
trace!("throwing exception: {}", exception_obj);
trace!("callee saved registers of last frame is saved at {}", last_frame_callee_saved);
if cfg!(debug_assertions) {
inspect_higher_address(last_frame_callee_saved, 20);
}
let mut cur_thread = thread::MuThread::current_mut();
// set exception object
cur_thread.exception_obj = exception_obj;
let cf_lock = cur_thread.vm.compiled_funcs().read().unwrap();
let func_lock = cur_thread.vm.funcs().read().unwrap();
let rust_frame_return_addr = unsafe {last_frame_callee_saved.plus(POINTER_SIZE * 19).load::<Address>()};
trace!("return address : 0x{:x} - throw instruction", rust_frame_return_addr);
// the return address is within throwing frame
let throw_frame_callsite = rust_frame_return_addr;
let (throw_func, throw_fv) = find_func_for_address(&cf_lock, &func_lock, throw_frame_callsite).unwrap();
trace!("throwing fucntion: {}", throw_func);
// skip to previous frame
// this is the frame that throws the exception
let previous_frame_fp_loc = last_frame_callee_saved.plus(POINTER_SIZE * 18);
let fp = unsafe {previous_frame_fp_loc.load::<Address>()};
trace!("FP of previous frame is {} (last_frame_callee_saved {} + 144)", fp, last_frame_callee_saved);
// set cursor to throwing frame
let mut cursor = FrameCursor {
fp: fp,
return_addr: unsafe {fp.plus(POINTER_SIZE).load::<Address>()},
func_id: throw_func,
func_ver_id: throw_fv,
callee_saved_locs: hashmap!{
//aarch64::LR.id() => last_frame_callee_saved.plus(POINTER_SIZE * 19),
//aarch64::FP.id() => last_frame_callee_saved.plus(POINTER_SIZE * 18),
aarch64::D8.id() => last_frame_callee_saved.plus(POINTER_SIZE * 17),
aarch64::D9.id() => last_frame_callee_saved.plus(POINTER_SIZE * 16),
aarch64::D10.id() => last_frame_callee_saved.plus(POINTER_SIZE * 15),
aarch64::D11.id() => last_frame_callee_saved.plus(POINTER_SIZE * 14),
aarch64::D12.id() => last_frame_callee_saved.plus(POINTER_SIZE * 13),
aarch64::D13.id() => last_frame_callee_saved.plus(POINTER_SIZE * 12),
aarch64::D14.id() => last_frame_callee_saved.plus(POINTER_SIZE * 11),
aarch64::D15.id() => last_frame_callee_saved.plus(POINTER_SIZE * 10),
aarch64::X19.id() => last_frame_callee_saved.plus(POINTER_SIZE * 9),
aarch64::X20.id() => last_frame_callee_saved.plus(POINTER_SIZE * 8),
aarch64::X21.id() => last_frame_callee_saved.plus(POINTER_SIZE * 7),
aarch64::X22.id() => last_frame_callee_saved.plus(POINTER_SIZE * 6),
aarch64::X23.id() => last_frame_callee_saved.plus(POINTER_SIZE * 5),
aarch64::X24.id() => last_frame_callee_saved.plus(POINTER_SIZE * 4),
aarch64::X25.id() => last_frame_callee_saved.plus(POINTER_SIZE * 3),
aarch64::X26.id() => last_frame_callee_saved.plus(POINTER_SIZE * 2),
aarch64::X27.id() => last_frame_callee_saved.plus(POINTER_SIZE * 1),
aarch64::X28.id() => last_frame_callee_saved.plus(POINTER_SIZE * 0),
}
};
if cfg!(debug_assertions) {
print_backtrace(throw_frame_callsite, cursor.clone());
}
let mut callsite = rust_frame_return_addr;
trace!("Stack Unwinding starts");
loop {
trace!("frame cursor: {}", cursor);
// release the locks, and keep a clone of the frame
// because we may improperly leave this function
let frame = {
let rwlock_cf = match cf_lock.get(&cursor.func_ver_id) {
Some(ret) => ret,
None => panic!("cannot find compiled func with func_id {}, possibly didnt find the right frame for return address", cursor.func_id)
};
let rwlock_cf = rwlock_cf.read().unwrap();
rwlock_cf.frame.clone()
};
trace!("frame info: {}", frame);
// find exception block - comparing callsite with frame info
trace!("checking catch block: looking for callsite 0x{:x}", callsite);
let exception_callsites = frame.get_exception_callsites();
for &(ref possible_callsite, ref dest) in exception_callsites.iter() {
let possible_callsite_addr = possible_callsite.to_address();
trace!("..check {} at 0x{:x}", possible_callsite, possible_callsite_addr);
if callsite == possible_callsite_addr {
trace!("found catch block at {}", dest);
// found an exception block
let dest_addr = dest.to_address();
// restore callee saved register and jump to dest_addr
// prepare a plain array [rbx, rbp, r12, r13, r14, r15]
macro_rules! unpack_callee_saved_from_cursor {
($reg: expr) => {
match cursor.callee_saved_locs.get(&$reg.id()) {
Some(addr) => unsafe {addr.load::<Word>()},
None => {
info!("no {} value was saved along unwinding", $reg.name().unwrap());
0
}
}