Commit 0ced4d8a authored by Isaac Oscar Gariano

Implemented very fast exception handling (compared to before...)!

parent 2230463c
@@ -26,6 +26,8 @@ use compiler::machine_code::CompiledFunction;
use compiler::frame::Frame;
use std::collections::HashMap;
+use std::collections::LinkedList;
use std::any::Any;

const INLINE_FASTPATH : bool = false;
@@ -40,10 +42,14 @@ pub struct InstructionSelection {
current_block: Option<MuName>,
current_block_in_ir: Option<MuName>,
current_func_start: Option<ValueLocation>,
-// key: block id, val: callsite that names the block as exception block
-current_exn_callsites: HashMap<MuID, Vec<ValueLocation>>,
+// A list of all callsites, with the corresponding exception block (if there is one).
+// Technically this is a map in that each key is unique, but we never add duplicate
+// keys or look things up, so a list of pairs is faster than a map.
+current_callsites: LinkedList<(MuName, MuID)>,
// key: block id, val: block location
-current_exn_blocks: HashMap<MuID, ValueLocation>,
+current_exn_blocks: HashMap<MuID, MuName>,
current_xr_value: Option<P<Value>>, // A temporary that holds the saved XR value (if needed)
current_constants: HashMap<MuID, P<Value>>,
current_constants_locs: HashMap<MuID, P<Value>>
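A minimal standalone sketch of the current_callsites pattern (illustrative only; it assumes MuName is a String alias and MuID an unsigned integer id, as elsewhere in this codebase, and uses block id 0 as the "no exception block" sentinel). Entries are appended in emission order and traversed exactly once when the function is finished, which is why a list of pairs wins over a map here:

    use std::collections::LinkedList;

    type MuName = String; // assumed alias
    type MuID = usize;    // assumed alias

    fn demo() {
        let mut callsites: LinkedList<(MuName, MuID)> = LinkedList::new();
        // Append-only during instruction selection; 0 means "no handler".
        callsites.push_back(("fv1_callsite_0".to_string(), 0));
        callsites.push_back(("fv1_callsite_1".to_string(), 42));
        // One linear pass at the end of compilation; no keyed lookups needed.
        for &(ref callsite, block_id) in callsites.iter() {
            println!("{} -> block {}", callsite, block_id);
        }
    }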
@@ -68,8 +74,7 @@ impl <'a> InstructionSelection {
// FIXME: ideally we should not create new blocks in instruction selection
// see Issue #6
current_func_start: None,
-// key: block id, val: callsite that names the block as exception block
-current_exn_callsites: HashMap::new(),
+current_callsites: LinkedList::new(),
current_exn_blocks: HashMap::new(),
current_xr_value: None,
current_constants: HashMap::new(),
@@ -3042,7 +3047,11 @@ impl <'a> InstructionSelection {
unimplemented!()
} else {
let callsite = self.new_callsite_label(cur_node);
-self.backend.emit_bl(callsite, func_name, None); // assume ccall won't throw an exception
+self.backend.emit_bl(callsite.clone(), func_name, None); // assume ccall won't throw an exception
+// TODO: What if there's an exception block?
+self.current_callsites.push_back((callsite, 0));

// record exception block (CCall may have an exception block)
if cur_node.is_some() {
@@ -3209,14 +3218,9 @@ impl <'a> InstructionSelection {
let ref exn_dest = resumption.as_ref().unwrap().exn_dest;
let target_block = exn_dest.target;
-if self.current_exn_callsites.contains_key(&target_block) {
-let callsites = self.current_exn_callsites.get_mut(&target_block).unwrap();
-callsites.push(callsite);
-} else {
-let mut callsites = vec![];
-callsites.push(callsite);
-self.current_exn_callsites.insert(target_block, callsites);
-}
+self.current_callsites.push_back((callsite.to_relocatable(), target_block));
+} else {
+self.current_callsites.push_back((callsite.to_relocatable(), 0));
}

// deal with ret vals
@@ -3284,22 +3288,21 @@ impl <'a> InstructionSelection {
}

// push all callee-saved registers
-for i in 0..CALLEE_SAVED_FPRs.len() {
-let ref reg = CALLEE_SAVED_FPRs[i];
-trace!("allocate frame slot for regs {}", reg);
+for i in 0..CALLEE_SAVED_GPRs.len() {
+let ref reg = CALLEE_SAVED_GPRs[i];
+trace!("allocate frame slot for reg {}", reg);
let loc = self.current_frame.as_mut().unwrap().alloc_slot_for_callee_saved_reg(reg.clone(), vm);
let loc = emit_mem(self.backend.as_mut(), &loc, get_type_alignment(&reg.ty, vm), f_context, vm);
self.backend.emit_str_callee_saved(&loc, &reg);
}
-for i in 0..CALLEE_SAVED_GPRs.len() {
-let ref reg = CALLEE_SAVED_GPRs[i];
-trace!("allocate frame slot for regs {}", reg);
+for i in 0..CALLEE_SAVED_FPRs.len() {
+let ref reg = CALLEE_SAVED_FPRs[i];
+trace!("allocate frame slot for reg {}", reg);
let loc = self.current_frame.as_mut().unwrap().alloc_slot_for_callee_saved_reg(reg.clone(), vm);
let loc = emit_mem(self.backend.as_mut(), &loc, get_type_alignment(&reg.ty, vm), f_context, vm);
self.backend.emit_str_callee_saved(&loc, &reg);
}

// unload arguments
@@ -3370,16 +3373,16 @@ impl <'a> InstructionSelection {
self.start_block(EPILOGUE_BLOCK_NAME.to_string(), &livein);

// pop all callee-saved registers
-for i in (0..CALLEE_SAVED_GPRs.len()).rev() {
-let ref reg = CALLEE_SAVED_GPRs[i];
+for i in (0..CALLEE_SAVED_FPRs.len()).rev() {
+let ref reg = CALLEE_SAVED_FPRs[i];
let reg_id = reg.extract_ssa_id().unwrap();
let loc = self.current_frame.as_mut().unwrap().allocated.get(&reg_id).unwrap().make_memory_op(reg.ty.clone(), vm);
let loc = emit_mem(self.backend.as_mut(), &loc, get_type_alignment(&reg.ty, vm), f_context, vm);
self.backend.emit_ldr_callee_saved(reg, &loc);
}
-for i in (0..CALLEE_SAVED_FPRs.len()).rev() {
-let ref reg = CALLEE_SAVED_FPRs[i];
+for i in (0..CALLEE_SAVED_GPRs.len()).rev() {
+let ref reg = CALLEE_SAVED_GPRs[i];
let reg_id = reg.extract_ssa_id().unwrap();
let loc = self.current_frame.as_mut().unwrap().allocated.get(&reg_id).unwrap().make_memory_op(reg.ty.clone(), vm);
let loc = emit_mem(self.backend.as_mut(), &loc, get_type_alignment(&reg.ty, vm), f_context, vm);
@@ -4093,7 +4096,7 @@ impl CompilerPass for InstructionSelection {
start_loc
});
self.current_callsite_id = 0;
-self.current_exn_callsites.clear();
+self.current_callsites.clear();
self.current_exn_blocks.clear();
self.current_constants.clear();
@@ -4124,7 +4127,7 @@ impl CompilerPass for InstructionSelection {
// we need to be aware of exception blocks so that we can emit information to catch exceptions
let loc = self.backend.start_exception_block(block_label.clone());
-self.current_exn_blocks.insert(block.id(), loc);
+self.current_exn_blocks.insert(block.id(), loc.to_relocatable());
} else {
// normal block
self.backend.start_block(block_label.clone());
@@ -4160,7 +4163,7 @@ impl CompilerPass for InstructionSelection {
}
}

fn finish_function(&mut self, vm: &VM, func: &mut MuFunctionVersion) {
self.emit_common_epilogue(&func.sig.ret_tys, &mut func.context, vm);
self.backend.print_cur_code();
@@ -4178,23 +4181,19 @@ impl CompilerPass for InstructionSelection {
let (mc, func_end) = self.backend.finish_code(func_name.clone());

// insert exception branch info
-let mut frame = match self.current_frame.take() {
+let frame = match self.current_frame.take() {
Some(frame) => frame,
None => panic!("no current_frame for function {} that is being compiled", func_name)
};
-for block_id in self.current_exn_blocks.keys() {
-let block_loc = match self.current_exn_blocks.get(&block_id) {
-Some(loc) => loc,
-None => panic!("failed to find exception block {}", block_id)
-};
-let callsites = match self.current_exn_callsites.get(&block_id) {
-Some(callsite) => callsite,
-None => panic!("failed to find callsite for block {}", block_id)
-};
-for callsite in callsites {
-frame.add_exception_callsite(callsite.clone(), block_loc.clone());
-}
-}
+for &(ref callsite, block_id) in self.current_callsites.iter() {
+let block_loc = if block_id == 0 {
+String::new()
+} else {
+self.current_exn_blocks.get(&block_id).unwrap().clone()
+};
+vm.add_exception_callsite(callsite.clone(), block_loc, self.current_fv_id);
+}

let compiled_func = CompiledFunction::new(func.func_id, func.id(), mc,
......
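Net effect of the changes above: exception callsites are no longer stored per-Frame and grouped by exception block; each (callsite, block id) pair is registered directly with the VM when the function is finished, with an empty block label standing for "no handler". A hypothetical sketch of the resulting record (field names assumed for illustration only; the diff shows the real vm.add_exception_callsite arguments):

    // Hypothetical record shape; names are illustrative, not the VM's API.
    struct ExceptionCallsite {
        callsite: String,    // label emitted immediately after the call instruction
        catch_block: String, // exception block label, or "" when there is no handler
        fv_id: usize,        // id of the function version that owns the callsite
    }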
@@ -28,6 +28,8 @@ use utils::math;
use utils::POINTER_SIZE;
use std::collections::HashMap;
+use std::collections::LinkedList;
use std::any::Any;

lazy_static! {
@@ -108,10 +110,12 @@ pub struct InstructionSelection {
current_block: Option<MuName>,
current_block_in_ir: Option<MuName>,
current_func_start: Option<ValueLocation>,
-// key: block id, val: callsite that names the block as exception block
-current_exn_callsites: HashMap<MuID, Vec<ValueLocation>>,
+// A list of pairs: the first element is the name of a callsite, the second the id of its
+// exception block (0 if it has none).
+// Technically this is a map in that each key is unique, but we never add duplicate
+// keys or look things up, so a list of pairs is faster than a map.
+current_callsites: LinkedList<(MuName, MuID)>,
// key: block id, val: block location
-current_exn_blocks: HashMap<MuID, ValueLocation>,
+current_exn_blocks: HashMap<MuID, MuName>,
current_constants: HashMap<MuID, P<Value>>,
current_constants_locs: HashMap<MuID, P<Value>>
@@ -135,8 +139,7 @@ impl <'a> InstructionSelection {
// FIXME: ideally we should not create new blocks in instruction selection
// see Issue #6
current_func_start: None,
-// key: block id, val: callsite that names the block as exception block
-current_exn_callsites: HashMap::new(),
+current_callsites: LinkedList::new(),
current_exn_blocks: HashMap::new(),
current_constants: HashMap::new(),
@@ -3166,8 +3169,11 @@ impl <'a> InstructionSelection {
unimplemented!()
} else {
let callsite = self.new_callsite_label(cur_node);
-self.backend.emit_call_near_rel32(callsite, func_name, None); // assume ccall won't throw an exception
+self.backend.emit_call_near_rel32(callsite.clone(), func_name, None); // assume ccall won't throw an exception
+// TODO: What if there's an exception block?
+self.current_callsites.push_back((callsite, 0));

// record exception block (CCall may have an exception block)
if cur_node.is_some() {
let cur_node = cur_node.unwrap();
@@ -3347,21 +3353,16 @@ impl <'a> InstructionSelection {
// record exception branch
let ref exn_dest = resumption.as_ref().unwrap().exn_dest;
let target_block = exn_dest.target;
-if self.current_exn_callsites.contains_key(&target_block) {
-let callsites = self.current_exn_callsites.get_mut(&target_block).unwrap();
-callsites.push(callsite);
-} else {
-let mut callsites = vec![];
-callsites.push(callsite);
-self.current_exn_callsites.insert(target_block, callsites);
-}
+self.current_callsites.push_back((callsite.to_relocatable(), target_block));

// insert an intermediate block to branch to normal
// the branch is inserted later (because we need to deal with postcall convention)
self.finish_block();
let fv_id = self.current_fv_id;
self.start_block(format!("normal_cont_for_call_{}_{}", fv_id, cur_node.id()));
+} else {
+self.current_callsites.push_back((callsite.to_relocatable(), 0));
}

// deal with ret vals, collapse stack etc.
@@ -4811,7 +4812,7 @@ impl CompilerPass for InstructionSelection {
start_loc
});
self.current_callsite_id = 0;
-self.current_exn_callsites.clear();
+self.current_callsites.clear();
self.current_exn_blocks.clear();
self.current_constants.clear();
@@ -4843,7 +4844,7 @@ impl CompilerPass for InstructionSelection {
// we need to be aware of exception blocks so that we can emit information to catch exceptions
let loc = self.backend.start_exception_block(block_label.clone());
-self.current_exn_blocks.insert(block.id(), loc);
+self.current_exn_blocks.insert(block.id(), loc.to_relocatable());
} else {
// normal block
self.backend.start_block(block_label.clone());
@@ -4904,25 +4905,21 @@ impl CompilerPass for InstructionSelection {
let (mc, func_end) = self.backend.finish_code(func_name.clone());

// insert exception branch info
-let mut frame = match self.current_frame.take() {
+let frame = match self.current_frame.take() {
Some(frame) => frame,
None => panic!("no current_frame for function {} that is being compiled", func_name)
};
-for block_id in self.current_exn_blocks.keys() {
-let block_loc = match self.current_exn_blocks.get(&block_id) {
-Some(loc) => loc,
-None => panic!("failed to find exception block {}", block_id)
-};
-let callsites = match self.current_exn_callsites.get(&block_id) {
-Some(callsite) => callsite,
-None => panic!("failed to find callsite for block {}", block_id)
-};
-for callsite in callsites {
-frame.add_exception_callsite(callsite.clone(), block_loc.clone());
-}
-}
+for &(ref callsite, block_id) in self.current_callsites.iter() {
+let block_loc = if block_id == 0 {
+String::new()
+} else {
+self.current_exn_blocks.get(&block_id).unwrap().clone()
+};
+vm.add_exception_callsite(callsite.clone(), block_loc, self.current_fv_id);
+}

let compiled_func = CompiledFunction::new(func.func_id, func.id(), mc,
self.current_constants.clone(), self.current_constants_locs.clone(),
frame, self.current_func_start.take().unwrap(), func_end);
......
@@ -13,6 +13,7 @@ pub use compiler::backend::x86_64::asm_backend::emit_context;
pub use compiler::backend::x86_64::asm_backend::emit_context_with_reloc;
#[cfg(feature = "aot")]
pub use compiler::backend::x86_64::asm_backend::spill_rewrite;
+use utils::Address;

use ast::ptr::P;
use ast::ir::*;
@@ -22,6 +23,9 @@ use compiler::backend::RegGroup;
use utils::LinkedHashMap;
use std::collections::HashMap;

+// Number of normal callee-saved registers (excluding RSP and RBP)
+pub const CALLEE_SAVED_COUNT : usize = 5;

macro_rules! GPR_ALIAS {
($alias: ident: ($id64: expr, $r64: ident) -> $r32: ident, $r16: ident, $r8l: ident, $r8h: ident) => {
lazy_static!{
@@ -448,6 +452,47 @@ pub fn pick_group_for_reg(reg_id: MuID) -> RegGroup {
RegGroup::get_from_value(reg)
}

+// Gets the previous frame pointer with respect to the current one
+#[inline(always)]
+pub fn get_previous_frame_pointer(frame_pointer: Address) -> Address {
+unsafe { frame_pointer.load::<Address>() }
+}
+
+// Gets the return address for the current frame pointer
+#[inline(always)]
+pub fn get_return_address(frame_pointer: Address) -> Address {
+unsafe { frame_pointer.plus(8).load::<Address>() }
+}
+
+// Gets the stack pointer before the current frame was created
+#[inline(always)]
+pub fn get_previous_stack_pointer(frame_pointer: Address) -> Address {
+frame_pointer.plus(16)
+}
+// Sets the previous frame pointer for the current frame pointer
+#[inline(always)]
+pub fn set_previous_frame_pointer(frame_pointer: Address, value: Address) {
+unsafe { frame_pointer.store::<Address>(value) }
+}
+
+// Sets the return address for the current frame pointer
+#[inline(always)]
+pub fn set_return_address(frame_pointer: Address, value: Address) {
+unsafe { frame_pointer.plus(8).store::<Address>(value) }
+}
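Together these helpers encode the standard frame layout: [FP] holds the saved previous frame pointer, [FP + 8] the return address, and FP + 16 is where the caller's stack pointer pointed. A sketch of how an unwinder might walk frames with this layout, using raw pointers in place of utils::Address (whose exact API is not shown here) and assuming a null previous frame pointer terminates the walk:

    // Sketch only: mirrors get_previous_frame_pointer / get_return_address.
    unsafe fn walk_frames(mut fp: *const u64) {
        while !fp.is_null() {
            let return_address = *fp.add(1); // [fp + 8], as in get_return_address
            println!("frame at {:p}, returns to {:#x}", fp, return_address);
            fp = *fp as *const u64;          // [fp], as in get_previous_frame_pointer
        }
    }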
+// Reg should be a 64-bit callee-saved GPR or FPR
+pub fn get_callee_saved_offset(reg: MuID) -> isize {
+debug_assert!(is_callee_saved(reg) && reg != RBP.id());
+
+let id = if reg == RBX.id() {
+0
+} else {
+(reg - R12.id())/4 + 1
+};
+(id as isize + 1)*(-8)
+}
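A worked example of the mapping, assuming (per the GPR_ALIAS macro above) that a 64-bit GPR and its narrower aliases occupy four consecutive MuIDs, so R12.id() through R15.id() differ by 4; the five slots (RBX, R12-R15) match CALLEE_SAVED_COUNT = 5:

    // RBX is slot 0, R12..R15 are slots 1..4; offset = (slot + 1) * -8.
    assert_eq!(get_callee_saved_offset(RBX.id()), -8);
    assert_eq!(get_callee_saved_offset(R12.id()), -16);
    assert_eq!(get_callee_saved_offset(R13.id()), -24);
    assert_eq!(get_callee_saved_offset(R15.id()), -40);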
pub fn is_callee_saved(reg_id: MuID) -> bool {
for reg in CALLEE_SAVED_GPRs.iter() {
if reg_id == reg.extract_ssa_id().unwrap() {
......
@@ -47,6 +47,20 @@ pub use compiler::backend::x86_64::pick_group_for_reg;
#[cfg(target_arch = "x86_64")]
pub use compiler::backend::x86_64::is_callee_saved;
+#[cfg(target_arch = "x86_64")]
+pub use compiler::backend::x86_64::CALLEE_SAVED_COUNT;
+#[cfg(target_arch = "x86_64")]
+pub use compiler::backend::x86_64::get_callee_saved_offset;
+#[cfg(target_arch = "x86_64")]
+pub use compiler::backend::x86_64::get_previous_frame_pointer;
+#[cfg(target_arch = "x86_64")]
+pub use compiler::backend::x86_64::get_return_address;
+#[cfg(target_arch = "x86_64")]
+pub use compiler::backend::x86_64::set_previous_frame_pointer;
+#[cfg(target_arch = "x86_64")]
+pub use compiler::backend::x86_64::set_return_address;
+#[cfg(target_arch = "x86_64")]
+pub use compiler::backend::x86_64::get_previous_stack_pointer;
#[cfg(target_arch = "x86_64")]
pub use compiler::backend::x86_64::emit_code;
#[cfg(target_arch = "x86_64")]
pub use compiler::backend::x86_64::emit_context;
@@ -82,6 +96,20 @@ pub use compiler::backend::aarch64::pick_group_for_reg;
#[cfg(target_arch = "aarch64")]
pub use compiler::backend::aarch64::is_callee_saved;
+#[cfg(target_arch = "aarch64")]
+pub use compiler::backend::aarch64::CALLEE_SAVED_COUNT;
+#[cfg(target_arch = "aarch64")]
+pub use compiler::backend::aarch64::get_callee_saved_offset;
+#[cfg(target_arch = "aarch64")]
+pub use compiler::backend::aarch64::get_previous_frame_pointer;
+#[cfg(target_arch = "aarch64")]
+pub use compiler::backend::aarch64::get_return_address;
+#[cfg(target_arch = "aarch64")]
+pub use compiler::backend::aarch64::get_previous_stack_pointer;
+#[cfg(target_arch = "aarch64")]
+pub use compiler::backend::aarch64::set_previous_frame_pointer;
+#[cfg(target_arch = "aarch64")]
+pub use compiler::backend::aarch64::set_return_address;
#[cfg(target_arch = "aarch64")]
pub use compiler::backend::aarch64::emit_code;
#[cfg(target_arch = "aarch64")]
pub use compiler::backend::aarch64::emit_context;
......
use ast::ir::*;
use ast::ptr::*;
use ast::types::*;
-use runtime::ValueLocation;
+use compiler::backend::get_callee_saved_offset;
use std::fmt;
use std::collections::HashMap;
@@ -25,8 +25,9 @@ pub struct Frame {
pub argument_by_stack: HashMap<MuID, P<Value>>,
pub allocated: HashMap<MuID, FrameSlot>,
-// (callsite, destination address)
-exception_callsites: Vec<(ValueLocation, ValueLocation)>
+// Mapping from a callee-saved register's id (i.e. its position in the list of
+// callee-saved registers) to the offset of its save slot from the frame pointer
+pub callee_saved: HashMap<isize, isize>,
}
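For illustration (slot offsets made up): after a prologue that saves RBX and R12, the map could hold the canonical value from get_callee_saved_offset as key and the slot this particular frame actually allocated as value:

    use std::collections::HashMap;

    fn demo() {
        let mut callee_saved: HashMap<isize, isize> = HashMap::new();
        callee_saved.insert(-8, -184);  // RBX: canonical -8, saved at [FP - 184]
        callee_saved.insert(-16, -192); // R12: canonical -16, saved at [FP - 192]
    }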
impl fmt::Display for Frame {
@@ -37,9 +38,6 @@ impl fmt::Display for Frame {
writeln!(f, " {}", slot).unwrap();
}
writeln!(f, " exception callsites:").unwrap();
-for &(ref callsite, ref dest) in self.exception_callsites.iter() {
-writeln!(f, " callsite: {} -> {}", callsite, dest).unwrap()
-}
writeln!(f, " cur offset: {}", self.cur_offset).unwrap();
writeln!(f, "}}")
}
@@ -52,9 +50,8 @@ impl Frame {
cur_offset: 0,
argument_by_reg: HashMap::new(),
argument_by_stack: HashMap::new(),
+callee_saved: HashMap::new(),
allocated: HashMap::new(),
-exception_callsites: vec![]
}
}
@@ -80,13 +77,21 @@ impl Frame {
}

pub fn alloc_slot_for_callee_saved_reg(&mut self, reg: P<Value>, vm: &VM) -> P<Value> {
-let slot = self.alloc_slot(&reg, vm);
-slot.make_memory_op(reg.ty.clone(), vm)
+let (mem, off) = {
+let slot = self.alloc_slot(&reg, vm);
+(slot.make_memory_op(reg.ty.clone(), vm), slot.offset)
+};
+let o = get_callee_saved_offset(reg.id());
+trace!("ISAAC: callee saved {} is at {}", reg, o);
+self.callee_saved.insert(o, off);
+mem
}
-pub fn remove_record_for_callee_saved_reg(&mut self, reg: MuID) {
+pub fn remove_record_for_callee_saved_reg(&mut self, reg: MuID)
+{
self.allocated.remove(&reg);
+let id = get_callee_saved_offset(reg);
+self.callee_saved.remove(&id);
}
pub fn alloc_slot_for_spilling(&mut self, reg: P<Value>, vm: &VM) -> P<Value> {
@@ -94,15 +99,6 @@ impl Frame {
slot.make_memory_op(reg.ty.clone(), vm)
}
-pub fn get_exception_callsites(&self) -> &Vec<(ValueLocation, ValueLocation)> {
-&self.exception_callsites
-}
-
-pub fn add_exception_callsite(&mut self, callsite: ValueLocation, dest: ValueLocation) {
-trace!("add exception callsite: {} to dest {}", callsite, dest);
-self.exception_callsites.push((callsite, dest));
-}
#[cfg(any(target_arch = "x86_64", target_arch = "aarch64"))]
pub fn alloc_slot(&mut self, val: &P<Value>, vm: &VM) -> &FrameSlot {
// RBP/FP is 16 bytes aligned, we are offsetting from RBP/FP
@@ -111,7 +107,11 @@ impl Frame {
let backendty = vm.get_backend_type_info(val.ty.id());
if backendty.alignment > 16 {
-unimplemented!()
+if cfg!(target_arch = "aarch64") {
+panic!("A type cannot have alignment greater than 16 on aarch64")
+} else {
+unimplemented!()
+}
}

self.cur_offset -= backendty.size as isize;
......
@@ -39,29 +39,29 @@ pop_pair FP, LR
.endm

.macro push_callee_saved stack=SP
+push_pair D8, D9, \stack
+push_pair D10, D11, \stack
+push_pair D12, D13, \stack
+push_pair D14, D15, \stack
push_pair X19, X20, \stack
push_pair X21, X22, \stack