Commit 0ced4d8a authored by Isaac Oscar Gariano's avatar Isaac Oscar Gariano

Implemented very fast exception handling (compared to before...)!

parent 2230463c
......@@ -26,6 +26,8 @@ use compiler::machine_code::CompiledFunction;
use compiler::frame::Frame;
use std::collections::HashMap;
use std::collections::LinkedList;
use std::any::Any;
const INLINE_FASTPATH : bool = false;
......@@ -40,10 +42,14 @@ pub struct InstructionSelection {
current_block: Option<MuName>,
current_block_in_ir: Option<MuName>,
current_func_start: Option<ValueLocation>,
// key: block id, val: callsite that names the block as exception block
current_exn_callsites: HashMap<MuID, Vec<ValueLocation>>,
// A list of all callsites, with the corresponding exception block (if there is one)
// Technically this is a map in that each Key is unique, but we will never try and add duplicate
// keys, or look things up, so a list of pairs is faster than a Map.
current_callsites: LinkedList<(MuName, MuID)>,
// key: block id, val: block location
current_exn_blocks: HashMap<MuID, ValueLocation>,
current_exn_blocks: HashMap<MuID, MuName>,
current_xr_value: Option<P<Value>>, // A temporary that holds to saved XR value (if needed)
current_constants: HashMap<MuID, P<Value>>,
current_constants_locs: HashMap<MuID, P<Value>>
......@@ -68,8 +74,7 @@ impl <'a> InstructionSelection {
// FIXME: ideally we should not create new blocks in instruction selection
// see Issue #6
current_func_start: None,
// key: block id, val: callsite that names the block as exception block
current_exn_callsites: HashMap::new(),
current_callsites: LinkedList::new(),
current_exn_blocks: HashMap::new(),
current_xr_value: None,
current_constants: HashMap::new(),
......@@ -3042,7 +3047,11 @@ impl <'a> InstructionSelection {
unimplemented!()
} else {
let callsite = self.new_callsite_label(cur_node);
self.backend.emit_bl(callsite, func_name, None); // assume ccall wont throw exception
self.backend.emit_bl(callsite.clone(), func_name, None); // assume ccall wont throw exception
// TODO: What if there's an exception block?
self.current_callsites.push_back((callsite, 0));
// record exception block (CCall may have an exception block)
if cur_node.is_some() {
......@@ -3209,14 +3218,9 @@ impl <'a> InstructionSelection {
let ref exn_dest = resumption.as_ref().unwrap().exn_dest;
let target_block = exn_dest.target;
if self.current_exn_callsites.contains_key(&target_block) {
let callsites = self.current_exn_callsites.get_mut(&target_block).unwrap();
callsites.push(callsite);
self.current_callsites.push_back((callsite.to_relocatable(), target_block));
} else {
let mut callsites = vec![];
callsites.push(callsite);
self.current_exn_callsites.insert(target_block, callsites);
}
self.current_callsites.push_back((callsite.to_relocatable(), 0));
}
// deal with ret vals
......@@ -3284,22 +3288,21 @@ impl <'a> InstructionSelection {
}
// push all callee-saved registers
for i in 0..CALLEE_SAVED_FPRs.len() {
let ref reg = CALLEE_SAVED_FPRs[i];
for i in 0..CALLEE_SAVED_GPRs.len() {
let ref reg = CALLEE_SAVED_GPRs[i];
trace!("allocate frame slot for regs {}", reg);
trace!("allocate frame slot for reg {}", reg);
let loc = self.current_frame.as_mut().unwrap().alloc_slot_for_callee_saved_reg(reg.clone(), vm);
let loc = emit_mem(self.backend.as_mut(), &loc, get_type_alignment(&reg.ty, vm), f_context, vm);
self.backend.emit_str_callee_saved(&loc, &reg);
}
for i in 0..CALLEE_SAVED_GPRs.len() {
let ref reg = CALLEE_SAVED_GPRs[i];
trace!("allocate frame slot for regs {}", reg);
for i in 0..CALLEE_SAVED_FPRs.len() {
let ref reg = CALLEE_SAVED_FPRs[i];
trace!("allocate frame slot for reg {}", reg);
let loc = self.current_frame.as_mut().unwrap().alloc_slot_for_callee_saved_reg(reg.clone(), vm);
let loc = emit_mem(self.backend.as_mut(), &loc, get_type_alignment(&reg.ty, vm), f_context, vm);
self.backend.emit_str_callee_saved(&loc, &reg);
}
// unload arguments
......@@ -3370,16 +3373,16 @@ impl <'a> InstructionSelection {
self.start_block(EPILOGUE_BLOCK_NAME.to_string(), &livein);
// pop all callee-saved registers
for i in (0..CALLEE_SAVED_GPRs.len()).rev() {
let ref reg = CALLEE_SAVED_GPRs[i];
for i in (0..CALLEE_SAVED_FPRs.len()).rev() {
let ref reg = CALLEE_SAVED_FPRs[i];
let reg_id = reg.extract_ssa_id().unwrap();
let loc = self.current_frame.as_mut().unwrap().allocated.get(&reg_id).unwrap().make_memory_op(reg.ty.clone(), vm);
let loc = emit_mem(self.backend.as_mut(), &loc, get_type_alignment(&reg.ty, vm), f_context, vm);
self.backend.emit_ldr_callee_saved(reg, &loc);
}
for i in (0..CALLEE_SAVED_FPRs.len()).rev() {
let ref reg = CALLEE_SAVED_FPRs[i];
for i in (0..CALLEE_SAVED_GPRs.len()).rev() {
let ref reg = CALLEE_SAVED_GPRs[i];
let reg_id = reg.extract_ssa_id().unwrap();
let loc = self.current_frame.as_mut().unwrap().allocated.get(&reg_id).unwrap().make_memory_op(reg.ty.clone(), vm);
let loc = emit_mem(self.backend.as_mut(), &loc, get_type_alignment(&reg.ty, vm), f_context, vm);
......@@ -4093,7 +4096,7 @@ impl CompilerPass for InstructionSelection {
start_loc
});
self.current_callsite_id = 0;
self.current_exn_callsites.clear();
self.current_callsites.clear();
self.current_exn_blocks.clear();
self.current_constants.clear();
......@@ -4124,7 +4127,7 @@ impl CompilerPass for InstructionSelection {
// we need to be aware of exception blocks so that we can emit information to catch exceptions
let loc = self.backend.start_exception_block(block_label.clone());
self.current_exn_blocks.insert(block.id(), loc);
self.current_exn_blocks.insert(block.id(), loc.to_relocatable());
} else {
// normal block
self.backend.start_block(block_label.clone());
......@@ -4178,23 +4181,19 @@ impl CompilerPass for InstructionSelection {
let (mc, func_end) = self.backend.finish_code(func_name.clone());
// insert exception branch info
let mut frame = match self.current_frame.take() {
let frame = match self.current_frame.take() {
Some(frame) => frame,
None => panic!("no current_frame for function {} that is being compiled", func_name)
};
for block_id in self.current_exn_blocks.keys() {
let block_loc = match self.current_exn_blocks.get(&block_id) {
Some(loc) => loc,
None => panic!("failed to find exception block {}", block_id)
};
let callsites = match self.current_exn_callsites.get(&block_id) {
Some(callsite) => callsite,
None => panic!("failed to find callsite for block {}", block_id)
for &(ref callsite, block_id) in self.current_callsites.iter() {
let block_loc = if block_id == 0 {
String::new()
} else {
self.current_exn_blocks.get(&block_id).unwrap().clone()
};
for callsite in callsites {
frame.add_exception_callsite(callsite.clone(), block_loc.clone());
}
vm.add_exception_callsite(callsite.clone(), block_loc, self.current_fv_id);
}
let compiled_func = CompiledFunction::new(func.func_id, func.id(), mc,
......
......@@ -5,7 +5,6 @@
#![allow(non_upper_case_globals)]
// TODO: Move architecture independent codes in here, inst_sel and asm_backend to somewhere else...
pub mod inst_sel;
......@@ -17,6 +16,8 @@ pub use compiler::backend::aarch64::asm_backend::ASMCodeGen;
pub use compiler::backend::aarch64::asm_backend::emit_code;
pub use compiler::backend::aarch64::asm_backend::emit_context;
pub use compiler::backend::aarch64::asm_backend::emit_context_with_reloc;
use utils::Address;
#[cfg(feature = "aot")]
pub use compiler::backend::aarch64::asm_backend::spill_rewrite;
......@@ -30,6 +31,9 @@ use vm::VM;
use utils::LinkedHashMap;
use std::collections::HashMap;
// Number of normal callee saved registers (excluding FP and LR, and SP)
pub const CALLEE_SAVED_COUNT : usize = 18;
macro_rules! REGISTER {
($id:expr, $name: expr, $ty: ident) => {
{
......@@ -111,7 +115,7 @@ GPR_ALIAS!(XZR_ALIAS: (64, XZR) -> WZR); // Pseudo register, not to be used by
ALIAS!(X8 -> XR); // Indirect result location register (points to a location in memory to write return values to)
ALIAS!(X16 -> IP0); // Intra procedure call register 0 (may be modified by the linker when executing BL/BLR instructions)
ALIAS!(X17 -> IP1); // Intra procedure call register 1 (may be modified by the linker when executing BL/BLR instructions)
ALIAS!(X18 -> PR); // Platform Register (NEVER TOUCH THIS REGISTER (Unless you can proove Linux dosn't use it))
ALIAS!(X18 -> PR); // Platform Register (NEVER TOUCH THIS REGISTER (Unless you can prove Linux doesn't use it))
ALIAS!(X29 -> FP); // Frame Pointer (can be used as a normal register when not calling or returning)
ALIAS!(X30 -> LR); // Link Register (not supposed to be used for any other purpose)
......@@ -760,6 +764,59 @@ pub fn pick_group_for_reg(reg_id: MuID) -> RegGroup {
}
}
// Gets the previous frame pointer with respect to the current one.
// The caller's saved FP is stored at [frame_pointer + 0].
#[inline(always)]
pub fn get_previous_frame_pointer(frame_pointer: Address) -> Address {
// SAFETY: assumes frame_pointer points at a valid, readable frame record — TODO confirm callers guarantee this
unsafe { frame_pointer.load::<Address>() }
}
// Gets the return address for the current frame pointer.
// The saved return address is stored one word (8 bytes) above the saved FP.
#[inline(always)]
pub fn get_return_address(frame_pointer: Address) -> Address {
// SAFETY: assumes frame_pointer + 8 is a valid, readable address — TODO confirm callers guarantee this
unsafe { frame_pointer.plus(8).load::<Address>() }
}
// Gets the stack pointer before the current frame was created.
// The frame record occupies 16 bytes (saved FP at +0, return address at +8),
// so the pre-frame SP is frame_pointer + 16.
#[inline(always)]
pub fn get_previous_stack_pointer(frame_pointer: Address) -> Address {
frame_pointer.plus(16)
}
// Sets the previous frame pointer (stored at [frame_pointer + 0]).
#[inline(always)]
pub fn set_previous_frame_pointer(frame_pointer: Address, value: Address) {
// SAFETY: assumes frame_pointer is a valid, writable address — TODO confirm callers guarantee this
unsafe { frame_pointer.store::<Address>(value) }
}
// Sets the return address for the current frame pointer (stored at [frame_pointer + 8]).
#[inline(always)]
pub fn set_return_address(frame_pointer: Address, value: Address) {
// SAFETY: assumes frame_pointer + 8 is a valid, writable address — TODO confirm callers guarantee this
unsafe { frame_pointer.plus(8).store::<Address>(value) }
}
// Reg should be a 64-bit callee saved GPR or FPR
// Returns the canonical frame-pointer-relative offset for the given callee-saved
// register: the first register maps to -8, the next to -16, and so on, with all
// GPRs placed before all FPRs.
pub fn get_callee_saved_offset(reg: MuID) -> isize {
debug_assert!(is_callee_saved(reg));
// Compute the register's index within the callee-saved list.
// NOTE(review): the divisions by 2 assume consecutive callee-saved register ids
// advance in steps of 2 — TODO confirm against the register definitions
let id = if reg < FPR_ID_START {
(reg - CALLEE_SAVED_GPRs[0].id())/2
} else {
(reg - CALLEE_SAVED_FPRs[0].id()) / 2 + CALLEE_SAVED_GPRs.len()
};
// Offsets grow downward from the frame pointer in 8-byte slots.
(id as isize + 1)*(-8)
}
// Returns the callee saved register with the id...
/*pub fn get_callee_saved_register(offset: isize) -> P<Value> {
debug_assert!(offset <= -8 && (-offset) % 8 == 0);
let id = ((offset/-8) - 1) as usize;
if id < CALLEE_SAVED_GPRs.len() {
CALLEE_SAVED_GPRs[id].clone()
} else if id - CALLEE_SAVED_GPRs.len() < CALLEE_SAVED_FPRs.len() {
CALLEE_SAVED_FPRs[id - CALLEE_SAVED_GPRs.len()].clone()
} else {
panic!("There is no callee saved register with id {}", offset)
}
}*/
pub fn is_callee_saved(reg_id: MuID) -> bool {
for reg in CALLEE_SAVED_GPRs.iter() {
......
......@@ -28,6 +28,8 @@ use utils::math;
use utils::POINTER_SIZE;
use std::collections::HashMap;
use std::collections::LinkedList;
use std::any::Any;
lazy_static! {
......@@ -108,10 +110,12 @@ pub struct InstructionSelection {
current_block: Option<MuName>,
current_block_in_ir: Option<MuName>,
current_func_start: Option<ValueLocation>,
// key: block id, val: callsite that names the block as exception block
current_exn_callsites: HashMap<MuID, Vec<ValueLocation>>,
// Technically this is a map in that each Key is unique, but we will never try and add duplicate
// keys, or look things up, so a list of pairs is faster than a Map.
// A list of pairs: the first element is the name of a callsite, the second is the id
// of its exception block (0 if the callsite has no exception block)
current_callsites: LinkedList<(MuName, MuID)>,
// key: block id, val: block location
current_exn_blocks: HashMap<MuID, ValueLocation>,
current_exn_blocks: HashMap<MuID, MuName>,
current_constants: HashMap<MuID, P<Value>>,
current_constants_locs: HashMap<MuID, P<Value>>
......@@ -135,8 +139,7 @@ impl <'a> InstructionSelection {
// FIXME: ideally we should not create new blocks in instruction selection
// see Issue #6
current_func_start: None,
// key: block id, val: callsite that names the block as exception block
current_exn_callsites: HashMap::new(),
current_callsites: LinkedList::new(),
current_exn_blocks: HashMap::new(),
current_constants: HashMap::new(),
......@@ -3166,7 +3169,10 @@ impl <'a> InstructionSelection {
unimplemented!()
} else {
let callsite = self.new_callsite_label(cur_node);
self.backend.emit_call_near_rel32(callsite, func_name, None); // assume ccall wont throw exception
self.backend.emit_call_near_rel32(callsite.clone(), func_name, None); // assume ccall wont throw exception
// TODO: What if there's an exception block?
self.current_callsites.push_back((callsite, 0));
// record exception block (CCall may have an exception block)
if cur_node.is_some() {
......@@ -3348,20 +3354,15 @@ impl <'a> InstructionSelection {
let ref exn_dest = resumption.as_ref().unwrap().exn_dest;
let target_block = exn_dest.target;
if self.current_exn_callsites.contains_key(&target_block) {
let callsites = self.current_exn_callsites.get_mut(&target_block).unwrap();
callsites.push(callsite);
} else {
let mut callsites = vec![];
callsites.push(callsite);
self.current_exn_callsites.insert(target_block, callsites);
}
self.current_callsites.push_back((callsite.to_relocatable(), target_block));
// insert an intermediate block to branch to normal
// the branch is inserted later (because we need to deal with postcall convention)
self.finish_block();
let fv_id = self.current_fv_id;
self.start_block(format!("normal_cont_for_call_{}_{}", fv_id, cur_node.id()));
} else {
self.current_callsites.push_back((callsite.to_relocatable(), 0));
}
// deal with ret vals, collapse stack etc.
......@@ -4811,7 +4812,7 @@ impl CompilerPass for InstructionSelection {
start_loc
});
self.current_callsite_id = 0;
self.current_exn_callsites.clear();
self.current_callsites.clear();
self.current_exn_blocks.clear();
self.current_constants.clear();
......@@ -4843,7 +4844,7 @@ impl CompilerPass for InstructionSelection {
// we need to be aware of exception blocks so that we can emit information to catch exceptions
let loc = self.backend.start_exception_block(block_label.clone());
self.current_exn_blocks.insert(block.id(), loc);
self.current_exn_blocks.insert(block.id(), loc.to_relocatable());
} else {
// normal block
self.backend.start_block(block_label.clone());
......@@ -4904,25 +4905,21 @@ impl CompilerPass for InstructionSelection {
let (mc, func_end) = self.backend.finish_code(func_name.clone());
// insert exception branch info
let mut frame = match self.current_frame.take() {
let frame = match self.current_frame.take() {
Some(frame) => frame,
None => panic!("no current_frame for function {} that is being compiled", func_name)
};
for block_id in self.current_exn_blocks.keys() {
let block_loc = match self.current_exn_blocks.get(&block_id) {
Some(loc) => loc,
None => panic!("failed to find exception block {}", block_id)
};
let callsites = match self.current_exn_callsites.get(&block_id) {
Some(callsite) => callsite,
None => panic!("failed to find callsite for block {}", block_id)
for &(ref callsite, block_id) in self.current_callsites.iter() {
let block_loc = if block_id == 0 {
String::new()
} else {
self.current_exn_blocks.get(&block_id).unwrap().clone()
};
for callsite in callsites {
frame.add_exception_callsite(callsite.clone(), block_loc.clone());
}
vm.add_exception_callsite(callsite.clone(), block_loc, self.current_fv_id);
}
let compiled_func = CompiledFunction::new(func.func_id, func.id(), mc,
self.current_constants.clone(), self.current_constants_locs.clone(),
frame, self.current_func_start.take().unwrap(), func_end);
......
......@@ -13,6 +13,7 @@ pub use compiler::backend::x86_64::asm_backend::emit_context;
pub use compiler::backend::x86_64::asm_backend::emit_context_with_reloc;
#[cfg(feature = "aot")]
pub use compiler::backend::x86_64::asm_backend::spill_rewrite;
use utils::Address;
use ast::ptr::P;
use ast::ir::*;
......@@ -22,6 +23,9 @@ use compiler::backend::RegGroup;
use utils::LinkedHashMap;
use std::collections::HashMap;
// Number of normal callee saved registers (excluding RSP and RBP)
pub const CALLEE_SAVED_COUNT : usize = 5;
macro_rules! GPR_ALIAS {
($alias: ident: ($id64: expr, $r64: ident) -> $r32: ident, $r16: ident, $r8l: ident, $r8h: ident) => {
lazy_static!{
......@@ -448,6 +452,47 @@ pub fn pick_group_for_reg(reg_id: MuID) -> RegGroup {
RegGroup::get_from_value(reg)
}
// Gets the previous frame pointer with respect to the current one.
// The caller's saved RBP is stored at [frame_pointer + 0].
#[inline(always)]
pub fn get_previous_frame_pointer(frame_pointer: Address) -> Address {
// SAFETY: assumes frame_pointer points at a valid, readable frame record — TODO confirm callers guarantee this
unsafe { frame_pointer.load::<Address>() }
}
// Gets the return address for the current frame pointer.
// The saved return address is stored one word (8 bytes) above the saved frame pointer.
#[inline(always)]
pub fn get_return_address(frame_pointer: Address) -> Address {
// SAFETY: assumes frame_pointer + 8 is a valid, readable address — TODO confirm callers guarantee this
unsafe { frame_pointer.plus(8).load::<Address>() }
}
// Gets the stack pointer before the current frame was created.
// The frame record occupies 16 bytes (saved frame pointer at +0, return address at +8),
// so the pre-frame SP is frame_pointer + 16.
#[inline(always)]
pub fn get_previous_stack_pointer(frame_pointer: Address) -> Address {
frame_pointer.plus(16)
}
// Sets the previous frame pointer (stored at [frame_pointer + 0]).
#[inline(always)]
pub fn set_previous_frame_pointer(frame_pointer: Address, value: Address) {
// SAFETY: assumes frame_pointer is a valid, writable address — TODO confirm callers guarantee this
unsafe { frame_pointer.store::<Address>(value) }
}
// Sets the return address for the current frame pointer (stored at [frame_pointer + 8]).
#[inline(always)]
pub fn set_return_address(frame_pointer: Address, value: Address) {
// SAFETY: assumes frame_pointer + 8 is a valid, writable address — TODO confirm callers guarantee this
unsafe { frame_pointer.plus(8).store::<Address>(value) }
}
// Reg should be a 64-bit callee saved GPR or FPR
// Returns the canonical frame-pointer-relative offset for the given callee-saved
// register: RBX maps to -8, then R12..R15 follow at -16, -24, ... (RBP is excluded).
pub fn get_callee_saved_offset(reg: MuID) -> isize {
debug_assert!(is_callee_saved(reg) && reg != RBP.id());
// Compute the register's index within the callee-saved list (RBX first, then R12..R15).
// NOTE(review): the division by 4 assumes consecutive register ids advance in steps
// of 4 — TODO confirm against the register definitions
let id = if reg == RBX.id() {
0
} else {
(reg - R12.id())/4 + 1
};
// Offsets grow downward from the frame pointer in 8-byte slots.
(id as isize + 1)*(-8)
}
pub fn is_callee_saved(reg_id: MuID) -> bool {
for reg in CALLEE_SAVED_GPRs.iter() {
if reg_id == reg.extract_ssa_id().unwrap() {
......
......@@ -47,6 +47,20 @@ pub use compiler::backend::x86_64::pick_group_for_reg;
#[cfg(target_arch = "x86_64")]
pub use compiler::backend::x86_64::is_callee_saved;
#[cfg(target_arch = "x86_64")]
pub use compiler::backend::x86_64::CALLEE_SAVED_COUNT ;
#[cfg(target_arch = "x86_64")]
pub use compiler::backend::x86_64::get_callee_saved_offset;
#[cfg(target_arch = "x86_64")]
pub use compiler::backend::x86_64::get_previous_frame_pointer;
#[cfg(target_arch = "x86_64")]
pub use compiler::backend::x86_64::get_return_address;
#[cfg(target_arch = "x86_64")]
pub use compiler::backend::x86_64::set_previous_frame_pointer;
#[cfg(target_arch = "x86_64")]
pub use compiler::backend::x86_64::set_return_address;
#[cfg(target_arch = "x86_64")]
pub use compiler::backend::x86_64::get_previous_stack_pointer;
#[cfg(target_arch = "x86_64")]
pub use compiler::backend::x86_64::emit_code;
#[cfg(target_arch = "x86_64")]
pub use compiler::backend::x86_64::emit_context;
......@@ -82,6 +96,20 @@ pub use compiler::backend::aarch64::pick_group_for_reg;
#[cfg(target_arch = "aarch64")]
pub use compiler::backend::aarch64::is_callee_saved;
#[cfg(target_arch = "aarch64")]
pub use compiler::backend::aarch64::CALLEE_SAVED_COUNT ;
#[cfg(target_arch = "aarch64")]
pub use compiler::backend::aarch64::get_callee_saved_offset;
#[cfg(target_arch = "aarch64")]
pub use compiler::backend::aarch64::get_previous_frame_pointer;
#[cfg(target_arch = "aarch64")]
pub use compiler::backend::aarch64::get_return_address;
#[cfg(target_arch = "aarch64")]
pub use compiler::backend::aarch64::get_previous_stack_pointer;
#[cfg(target_arch = "aarch64")]
pub use compiler::backend::aarch64::set_previous_frame_pointer;
#[cfg(target_arch = "aarch64")]
pub use compiler::backend::aarch64::set_return_address;
#[cfg(target_arch = "aarch64")]
pub use compiler::backend::aarch64::emit_code;
#[cfg(target_arch = "aarch64")]
pub use compiler::backend::aarch64::emit_context;
......
use ast::ir::*;
use ast::ptr::*;
use ast::types::*;
use runtime::ValueLocation;
use compiler::backend::get_callee_saved_offset;
use std::fmt;
use std::collections::HashMap;
......@@ -25,8 +25,9 @@ pub struct Frame {
pub argument_by_stack: HashMap<MuID, P<Value>>,
pub allocated: HashMap<MuID, FrameSlot>,
// Mapping from callee saved id (i.e. the position in the list of callee saved registers) to offset from the frame pointer
pub callee_saved: HashMap<isize, isize>,
// (callsite, destination address)
exception_callsites: Vec<(ValueLocation, ValueLocation)>
}
impl fmt::Display for Frame {
......@@ -37,9 +38,6 @@ impl fmt::Display for Frame {
writeln!(f, " {}", slot).unwrap();
}
writeln!(f, " exception callsites:").unwrap();
for &(ref callsite, ref dest) in self.exception_callsites.iter() {
writeln!(f, " callsite: {} -> {}", callsite, dest).unwrap()
}
writeln!(f, " cur offset: {}", self.cur_offset).unwrap();
writeln!(f, "}}")
}
......@@ -52,9 +50,8 @@ impl Frame {
cur_offset: 0,
argument_by_reg: HashMap::new(),
argument_by_stack: HashMap::new(),
callee_saved: HashMap::new(),
allocated: HashMap::new(),
exception_callsites: vec![]
}
}
......@@ -80,13 +77,21 @@ impl Frame {
}
/// Allocates a frame slot for a callee-saved register and returns a memory
/// operand addressing that slot.
///
/// Also records, in `self.callee_saved`, the mapping from the register's
/// canonical callee-saved offset (see `get_callee_saved_offset`) to the
/// slot's actual frame-pointer offset, so the frame can later be walked or
/// restored (e.g. during exception handling).
pub fn alloc_slot_for_callee_saved_reg(&mut self, reg: P<Value>, vm: &VM) -> P<Value> {
    let (mem, off) = {
        let slot = self.alloc_slot(&reg, vm);
        (slot.make_memory_op(reg.ty.clone(), vm), slot.offset)
    };
    // Canonical id for this callee-saved register, independent of frame layout.
    let o = get_callee_saved_offset(reg.id());
    trace!("callee saved {} is at {}", reg, o);
    self.callee_saved.insert(o, off);
    mem
}
pub fn remove_record_for_callee_saved_reg(&mut self, reg: MuID) {
pub fn remove_record_for_callee_saved_reg(&mut self, reg: MuID)
{
self.allocated.remove(&reg);
let id = get_callee_saved_offset(reg);
self.callee_saved.remove(&id);
}
pub fn alloc_slot_for_spilling(&mut self, reg: P<Value>, vm: &VM) -> P<Value> {
......@@ -94,15 +99,6 @@ impl Frame {
slot.make_memory_op(reg.ty.clone(), vm)
}
pub fn get_exception_callsites(&self) -> &Vec<(ValueLocation, ValueLocation)> {
&self.exception_callsites
}
pub fn add_exception_callsite(&mut self, callsite: ValueLocation, dest: ValueLocation) {
trace!("add exception callsite: {} to dest {}", callsite, dest);
self.exception_callsites.push((callsite, dest));
}
#[cfg(any(target_arch = "x86_64", target_arch = "aarch64"))]
pub fn alloc_slot(&mut self, val: &P<Value>, vm: &VM) -> &FrameSlot {
// RBP/FP is 16 bytes aligned, we are offsetting from RBP/FP
......@@ -111,8 +107,12 @@ impl Frame {
let backendty = vm.get_backend_type_info(val.ty.id());
if backendty.alignment > 16 {
if cfg!(target_arch="aarch64") {
panic!("A type cannot have alignment greater than 16 on aarch64")
} else {
unimplemented!()
}
}
self.cur_offset -= backendty.size as isize;
......
......@@ -39,29 +39,29 @@ pop_pair FP, LR
.endm
.macro push_callee_saved stack=SP
push_pair D8, D9, \stack
push_pair D10, D11, \stack
push_pair D12, D13, \stack
push_pair D14, D15, \stack
push_pair X19, X20, \stack
push_pair X21, X22, \stack
push_pair X23, X24, \stack
push_pair X25, X26, \stack
push_pair X27, X28, \stack
push_pair D8, D9, \stack
push_pair D10, D11, \stack
push_pair D12, D13, \stack
push_pair D14, D15, \stack
.endm
.macro pop_callee_saved stack=SP
pop_pair D15, D14, \stack
pop_pair D13, D12, \stack
pop_pair D11, D10, \stack
pop_pair D9, D8, \stack
pop_pair X28, X27, \stack
pop_pair X26, X25, \stack
pop_pair X24, X23, \stack
pop_pair X22, X21, \stack
pop_pair X20, X19, \stack
pop_pair D15, D14, \stack
pop_pair D13, D12, \stack
pop_pair D11, D10, \stack
pop_pair D9, D8, \stack
.endm
.macro pop_arguments stack=SP
......
use ast::ir::*;
use compiler::machine_code::CompiledFunction;
use compiler::backend::aarch64;
use utils::Address;
use utils::Word;
use utils::POINTER_SIZE;
use runtime::thread;
use std::sync::RwLock;
use std::sync::RwLockReadGuard;
use std::collections::HashMap;
use std::fmt;
// muentry_throw_exception in swap_stack_aarch64_sysv.S
// is like a special calling convention to throw_exception_internal
// in order to save all the callee saved registers at a known location
// normal calling convention:
// ---code--- ---stack---
// push caller saved caller saved
// call
// -> (in callee) push LR, FP LR, old FP
// MOV SP -> FP callee saved
// push callee saved
// this function's calling convention
// ---code--- ---stack---
// push caller saved caller saved
// call LR, old FP
// -> (in asm) push callee saved all callee saved <- 2nd arg
// (in rust) push LR, FP (by rust) LR, old FP
// mov SP -> FP (by rust) callee saved
// push callee saved
// we do not want to make any assumption on where rust saves rbp or callee saved
// so we save them by ourselves in assembly, and pass a pointer as 2nd argument
#[no_mangle]
#[allow(unreachable_code)]
// last_frame_callee_saved: a pointer passed from assembly, values of 6 callee_saved
// registers are laid out as rbx, rbp, r12-r15 (from low address to high address)
// and return address is put after 6 callee saved registers
// NOTE(review): "rbx, rbp, r12-r15" is x86-64 terminology, but this file is the
// aarch64 backend (and the code below reads 19 slots) — this comment looks copied
// from x86-64 and stale; verify the actual aarch64 layout.
pub extern fn throw_exception_internal(exception_obj: Address, last_frame_callee_saved: Address) -> ! {
trace!("throwing exception: {}", exception_obj);
trace!("callee saved registers of last frame is saved at {}", last_frame_callee_saved);
if cfg!(debug_assertions) {
inspect_higher_address(last_frame_callee_saved, 20);
}
let mut cur_thread = thread::MuThread::current_mut();
// set exception object
cur_thread.exception_obj = exception_obj;
let cf_lock = cur_thread.vm.compiled_funcs().read().unwrap();
let func_lock = cur_thread.vm.funcs().read().unwrap();
let rust_frame_return_addr = unsafe {last_frame_callee_saved.plus(POINTER_SIZE * 19).load::<Address>()};
trace!("return address : 0x{:x} - throw instruction", rust_frame_return_addr);
// the return address is within throwing frame
let throw_frame_callsite = rust_frame_return_addr;
let (throw_func, throw_fv) = find_func_for_address(&cf_lock, &func_lock, throw_frame_callsite).unwrap();
trace!("throwing fucntion: {}", throw_func);
// skip to previous frame
// this is the frame that throws the exception
let previous_frame_fp_loc = last_frame_callee_saved.plus(POINTER_SIZE * 18);
let fp = unsafe {previous_frame_fp_loc.load::<Address>()};
trace!("FP of previous frame is {} (last_frame_callee_saved {} + 144)", fp, last_frame_callee_saved);
// set cursor to throwing frame
let mut cursor = FrameCursor {
fp: fp,
return_addr: unsafe {fp.plus(POINTER_SIZE).load::<Address>()},
func_id