Commit 0ced4d8a authored by Isaac Oscar Gariano's avatar Isaac Oscar Gariano

Implemented very fast exception handling (compared to before...)!

parent 2230463c
...@@ -26,6 +26,8 @@ use compiler::machine_code::CompiledFunction; ...@@ -26,6 +26,8 @@ use compiler::machine_code::CompiledFunction;
use compiler::frame::Frame; use compiler::frame::Frame;
use std::collections::HashMap; use std::collections::HashMap;
use std::collections::LinkedList;
use std::any::Any; use std::any::Any;
const INLINE_FASTPATH : bool = false; const INLINE_FASTPATH : bool = false;
...@@ -40,10 +42,14 @@ pub struct InstructionSelection { ...@@ -40,10 +42,14 @@ pub struct InstructionSelection {
current_block: Option<MuName>, current_block: Option<MuName>,
current_block_in_ir: Option<MuName>, current_block_in_ir: Option<MuName>,
current_func_start: Option<ValueLocation>, current_func_start: Option<ValueLocation>,
// key: block id, val: callsite that names the block as exception block
current_exn_callsites: HashMap<MuID, Vec<ValueLocation>>, // A list of all callsites, with the corresponding exception block (if there is one)
// Technically this is a map in that each Key is unique, but we will never try and add duplicate
// keys, or look things up, so a list of pairs is faster than a Map.
current_callsites: LinkedList<(MuName, MuID)>,
// key: block id, val: block location // key: block id, val: block location
current_exn_blocks: HashMap<MuID, ValueLocation>, current_exn_blocks: HashMap<MuID, MuName>,
current_xr_value: Option<P<Value>>, // A temporary that holds to saved XR value (if needed) current_xr_value: Option<P<Value>>, // A temporary that holds to saved XR value (if needed)
current_constants: HashMap<MuID, P<Value>>, current_constants: HashMap<MuID, P<Value>>,
current_constants_locs: HashMap<MuID, P<Value>> current_constants_locs: HashMap<MuID, P<Value>>
...@@ -68,8 +74,7 @@ impl <'a> InstructionSelection { ...@@ -68,8 +74,7 @@ impl <'a> InstructionSelection {
// FIXME: ideally we should not create new blocks in instruction selection // FIXME: ideally we should not create new blocks in instruction selection
// see Issue #6 // see Issue #6
current_func_start: None, current_func_start: None,
// key: block id, val: callsite that names the block as exception block current_callsites: LinkedList::new(),
current_exn_callsites: HashMap::new(),
current_exn_blocks: HashMap::new(), current_exn_blocks: HashMap::new(),
current_xr_value: None, current_xr_value: None,
current_constants: HashMap::new(), current_constants: HashMap::new(),
...@@ -3042,7 +3047,11 @@ impl <'a> InstructionSelection { ...@@ -3042,7 +3047,11 @@ impl <'a> InstructionSelection {
unimplemented!() unimplemented!()
} else { } else {
let callsite = self.new_callsite_label(cur_node); let callsite = self.new_callsite_label(cur_node);
self.backend.emit_bl(callsite, func_name, None); // assume ccall wont throw exception
self.backend.emit_bl(callsite.clone(), func_name, None); // assume ccall wont throw exception
// TODO: What if there's an exception block?
self.current_callsites.push_back((callsite, 0));
// record exception block (CCall may have an exception block) // record exception block (CCall may have an exception block)
if cur_node.is_some() { if cur_node.is_some() {
...@@ -3209,14 +3218,9 @@ impl <'a> InstructionSelection { ...@@ -3209,14 +3218,9 @@ impl <'a> InstructionSelection {
let ref exn_dest = resumption.as_ref().unwrap().exn_dest; let ref exn_dest = resumption.as_ref().unwrap().exn_dest;
let target_block = exn_dest.target; let target_block = exn_dest.target;
if self.current_exn_callsites.contains_key(&target_block) { self.current_callsites.push_back((callsite.to_relocatable(), target_block));
let callsites = self.current_exn_callsites.get_mut(&target_block).unwrap();
callsites.push(callsite);
} else { } else {
let mut callsites = vec![]; self.current_callsites.push_back((callsite.to_relocatable(), 0));
callsites.push(callsite);
self.current_exn_callsites.insert(target_block, callsites);
}
} }
// deal with ret vals // deal with ret vals
...@@ -3284,22 +3288,21 @@ impl <'a> InstructionSelection { ...@@ -3284,22 +3288,21 @@ impl <'a> InstructionSelection {
} }
// push all callee-saved registers // push all callee-saved registers
for i in 0..CALLEE_SAVED_FPRs.len() { for i in 0..CALLEE_SAVED_GPRs.len() {
let ref reg = CALLEE_SAVED_FPRs[i]; let ref reg = CALLEE_SAVED_GPRs[i];
trace!("allocate frame slot for regs {}", reg);
trace!("allocate frame slot for reg {}", reg);
let loc = self.current_frame.as_mut().unwrap().alloc_slot_for_callee_saved_reg(reg.clone(), vm); let loc = self.current_frame.as_mut().unwrap().alloc_slot_for_callee_saved_reg(reg.clone(), vm);
let loc = emit_mem(self.backend.as_mut(), &loc, get_type_alignment(&reg.ty, vm), f_context, vm); let loc = emit_mem(self.backend.as_mut(), &loc, get_type_alignment(&reg.ty, vm), f_context, vm);
self.backend.emit_str_callee_saved(&loc, &reg); self.backend.emit_str_callee_saved(&loc, &reg);
} }
for i in 0..CALLEE_SAVED_GPRs.len() { for i in 0..CALLEE_SAVED_FPRs.len() {
let ref reg = CALLEE_SAVED_GPRs[i]; let ref reg = CALLEE_SAVED_FPRs[i];
trace!("allocate frame slot for regs {}", reg);
trace!("allocate frame slot for reg {}", reg);
let loc = self.current_frame.as_mut().unwrap().alloc_slot_for_callee_saved_reg(reg.clone(), vm); let loc = self.current_frame.as_mut().unwrap().alloc_slot_for_callee_saved_reg(reg.clone(), vm);
let loc = emit_mem(self.backend.as_mut(), &loc, get_type_alignment(&reg.ty, vm), f_context, vm); let loc = emit_mem(self.backend.as_mut(), &loc, get_type_alignment(&reg.ty, vm), f_context, vm);
self.backend.emit_str_callee_saved(&loc, &reg); self.backend.emit_str_callee_saved(&loc, &reg);
} }
// unload arguments // unload arguments
...@@ -3370,16 +3373,16 @@ impl <'a> InstructionSelection { ...@@ -3370,16 +3373,16 @@ impl <'a> InstructionSelection {
self.start_block(EPILOGUE_BLOCK_NAME.to_string(), &livein); self.start_block(EPILOGUE_BLOCK_NAME.to_string(), &livein);
// pop all callee-saved registers // pop all callee-saved registers
for i in (0..CALLEE_SAVED_GPRs.len()).rev() { for i in (0..CALLEE_SAVED_FPRs.len()).rev() {
let ref reg = CALLEE_SAVED_GPRs[i]; let ref reg = CALLEE_SAVED_FPRs[i];
let reg_id = reg.extract_ssa_id().unwrap(); let reg_id = reg.extract_ssa_id().unwrap();
let loc = self.current_frame.as_mut().unwrap().allocated.get(&reg_id).unwrap().make_memory_op(reg.ty.clone(), vm); let loc = self.current_frame.as_mut().unwrap().allocated.get(&reg_id).unwrap().make_memory_op(reg.ty.clone(), vm);
let loc = emit_mem(self.backend.as_mut(), &loc, get_type_alignment(&reg.ty, vm), f_context, vm); let loc = emit_mem(self.backend.as_mut(), &loc, get_type_alignment(&reg.ty, vm), f_context, vm);
self.backend.emit_ldr_callee_saved(reg, &loc); self.backend.emit_ldr_callee_saved(reg, &loc);
} }
for i in (0..CALLEE_SAVED_FPRs.len()).rev() { for i in (0..CALLEE_SAVED_GPRs.len()).rev() {
let ref reg = CALLEE_SAVED_FPRs[i]; let ref reg = CALLEE_SAVED_GPRs[i];
let reg_id = reg.extract_ssa_id().unwrap(); let reg_id = reg.extract_ssa_id().unwrap();
let loc = self.current_frame.as_mut().unwrap().allocated.get(&reg_id).unwrap().make_memory_op(reg.ty.clone(), vm); let loc = self.current_frame.as_mut().unwrap().allocated.get(&reg_id).unwrap().make_memory_op(reg.ty.clone(), vm);
let loc = emit_mem(self.backend.as_mut(), &loc, get_type_alignment(&reg.ty, vm), f_context, vm); let loc = emit_mem(self.backend.as_mut(), &loc, get_type_alignment(&reg.ty, vm), f_context, vm);
...@@ -4093,7 +4096,7 @@ impl CompilerPass for InstructionSelection { ...@@ -4093,7 +4096,7 @@ impl CompilerPass for InstructionSelection {
start_loc start_loc
}); });
self.current_callsite_id = 0; self.current_callsite_id = 0;
self.current_exn_callsites.clear(); self.current_callsites.clear();
self.current_exn_blocks.clear(); self.current_exn_blocks.clear();
self.current_constants.clear(); self.current_constants.clear();
...@@ -4124,7 +4127,7 @@ impl CompilerPass for InstructionSelection { ...@@ -4124,7 +4127,7 @@ impl CompilerPass for InstructionSelection {
// we need to be aware of exception blocks so that we can emit information to catch exceptions // we need to be aware of exception blocks so that we can emit information to catch exceptions
let loc = self.backend.start_exception_block(block_label.clone()); let loc = self.backend.start_exception_block(block_label.clone());
self.current_exn_blocks.insert(block.id(), loc); self.current_exn_blocks.insert(block.id(), loc.to_relocatable());
} else { } else {
// normal block // normal block
self.backend.start_block(block_label.clone()); self.backend.start_block(block_label.clone());
...@@ -4178,23 +4181,19 @@ impl CompilerPass for InstructionSelection { ...@@ -4178,23 +4181,19 @@ impl CompilerPass for InstructionSelection {
let (mc, func_end) = self.backend.finish_code(func_name.clone()); let (mc, func_end) = self.backend.finish_code(func_name.clone());
// insert exception branch info // insert exception branch info
let mut frame = match self.current_frame.take() { let frame = match self.current_frame.take() {
Some(frame) => frame, Some(frame) => frame,
None => panic!("no current_frame for function {} that is being compiled", func_name) None => panic!("no current_frame for function {} that is being compiled", func_name)
}; };
for block_id in self.current_exn_blocks.keys() {
let block_loc = match self.current_exn_blocks.get(&block_id) { for &(ref callsite, block_id) in self.current_callsites.iter() {
Some(loc) => loc, let block_loc = if block_id == 0 {
None => panic!("failed to find exception block {}", block_id) String::new()
}; } else {
let callsites = match self.current_exn_callsites.get(&block_id) { self.current_exn_blocks.get(&block_id).unwrap().clone()
Some(callsite) => callsite,
None => panic!("failed to find callsite for block {}", block_id)
}; };
for callsite in callsites { vm.add_exception_callsite(callsite.clone(), block_loc, self.current_fv_id);
frame.add_exception_callsite(callsite.clone(), block_loc.clone());
}
} }
let compiled_func = CompiledFunction::new(func.func_id, func.id(), mc, let compiled_func = CompiledFunction::new(func.func_id, func.id(), mc,
......
...@@ -5,7 +5,6 @@ ...@@ -5,7 +5,6 @@
#![allow(non_upper_case_globals)] #![allow(non_upper_case_globals)]
// TODO: Move architecture independent codes in here, inst_sel and asm_backend to somewhere else... // TODO: Move architecture independent codes in here, inst_sel and asm_backend to somewhere else...
pub mod inst_sel; pub mod inst_sel;
...@@ -17,6 +16,8 @@ pub use compiler::backend::aarch64::asm_backend::ASMCodeGen; ...@@ -17,6 +16,8 @@ pub use compiler::backend::aarch64::asm_backend::ASMCodeGen;
pub use compiler::backend::aarch64::asm_backend::emit_code; pub use compiler::backend::aarch64::asm_backend::emit_code;
pub use compiler::backend::aarch64::asm_backend::emit_context; pub use compiler::backend::aarch64::asm_backend::emit_context;
pub use compiler::backend::aarch64::asm_backend::emit_context_with_reloc; pub use compiler::backend::aarch64::asm_backend::emit_context_with_reloc;
use utils::Address;
#[cfg(feature = "aot")] #[cfg(feature = "aot")]
pub use compiler::backend::aarch64::asm_backend::spill_rewrite; pub use compiler::backend::aarch64::asm_backend::spill_rewrite;
...@@ -30,6 +31,9 @@ use vm::VM; ...@@ -30,6 +31,9 @@ use vm::VM;
use utils::LinkedHashMap; use utils::LinkedHashMap;
use std::collections::HashMap; use std::collections::HashMap;
// Number of normal callee saved registers (excluding FP and LR, and SP)
pub const CALLEE_SAVED_COUNT : usize = 18;
macro_rules! REGISTER { macro_rules! REGISTER {
($id:expr, $name: expr, $ty: ident) => { ($id:expr, $name: expr, $ty: ident) => {
{ {
...@@ -111,7 +115,7 @@ GPR_ALIAS!(XZR_ALIAS: (64, XZR) -> WZR); // Pseudo register, not to be used by ...@@ -111,7 +115,7 @@ GPR_ALIAS!(XZR_ALIAS: (64, XZR) -> WZR); // Pseudo register, not to be used by
ALIAS!(X8 -> XR); // Indirect result location register (points to a location in memory to write return values to) ALIAS!(X8 -> XR); // Indirect result location register (points to a location in memory to write return values to)
ALIAS!(X16 -> IP0); // Intra proecdure call register 0 (may be modified by the linker when executing BL/BLR instructions) ALIAS!(X16 -> IP0); // Intra proecdure call register 0 (may be modified by the linker when executing BL/BLR instructions)
ALIAS!(X17 -> IP1);// Intra proecdure call register 1 (may be modified by the linker when executing BL/BLR instructions) ALIAS!(X17 -> IP1);// Intra proecdure call register 1 (may be modified by the linker when executing BL/BLR instructions)
ALIAS!(X18 -> PR); // Platform Register (NEVER TOUCH THIS REGISTER (Unless you can proove Linux dosn't use it)) ALIAS!(X18 -> PR); // Platform Register (NEVER TOUCH THIS REGISTER (Unless you can prove Linux dosn't use it))
ALIAS!(X29 -> FP); // Frame Pointer (can be used as a normal register when not calling or returning) ALIAS!(X29 -> FP); // Frame Pointer (can be used as a normal register when not calling or returning)
ALIAS!(X30 -> LR); // Link Register (not supposed to be used for any other purpose) ALIAS!(X30 -> LR); // Link Register (not supposed to be used for any other purpose)
...@@ -760,6 +764,59 @@ pub fn pick_group_for_reg(reg_id: MuID) -> RegGroup { ...@@ -760,6 +764,59 @@ pub fn pick_group_for_reg(reg_id: MuID) -> RegGroup {
} }
} }
// Gets the previous frame pointer with respect to the current
// (reads the saved FP stored at offset 0 from `frame_pointer`).
#[inline(always)]
pub fn get_previous_frame_pointer(frame_pointer: Address) -> Address {
// SAFETY: caller must supply a valid frame pointer of a live frame,
// so the saved-FP slot it points at is readable.
unsafe { frame_pointer.load::<Address>() }
}
// Gets the return address for the current frame pointer
// (reads the word stored at offset 8 from `frame_pointer`).
#[inline(always)]
pub fn get_return_address(frame_pointer: Address) -> Address {
// SAFETY: caller must supply a valid frame pointer; the word at
// frame_pointer + 8 is the saved return address (saved LR).
unsafe { frame_pointer.plus(8).load::<Address>() }
}
// Gets the stack pointer before the current frame was created
// (the frame record occupies 16 bytes: saved FP at +0, return address at +8,
// so the caller's SP sits immediately above it).
#[inline(always)]
pub fn get_previous_stack_pointer(frame_pointer: Address) -> Address {
frame_pointer.plus(16)
}
// Overwrites the saved frame pointer (the slot at offset 0) of the frame
// identified by `frame_pointer`.
#[inline(always)]
pub fn set_previous_frame_pointer(frame_pointer: Address, value: Address) {
// SAFETY: caller must supply a valid, writable frame pointer.
unsafe { frame_pointer.store::<Address>(value) }
}
// Sets the return address for the current frame pointer
// (overwrites the saved-return-address slot at offset 8).
#[inline(always)]
pub fn set_return_address(frame_pointer: Address, value: Address) {
// SAFETY: caller must supply a valid, writable frame pointer.
unsafe { frame_pointer.plus(8).store::<Address>(value) }
}
// Reg should be a 64-bit callee saved GPR or FPR.
// Returns the frame-pointer-relative offset of the 8-byte spill slot reserved
// for `reg`: slots are laid out contiguously below FP starting at -8,
// GPRs first, then FPRs (matching the save order in the prologue).
pub fn get_callee_saved_offset(reg: MuID) -> isize {
debug_assert!(is_callee_saved(reg));
// Index of `reg` within the combined callee-saved list.
// NOTE(review): dividing the id delta by 2 assumes register ids are
// allocated in steps of 2 (one 64-bit and one narrower alias per
// register) — TODO confirm against the REGISTER!/GPR_ALIAS! definitions.
let id = if reg < FPR_ID_START {
(reg - CALLEE_SAVED_GPRs[0].id())/2
} else {
(reg - CALLEE_SAVED_FPRs[0].id()) / 2 + CALLEE_SAVED_GPRs.len()
};
// Slot `id` lives at FP - 8*(id + 1).
(id as isize + 1)*(-8)
}
// Returns the callee saved register with the id...
/*pub fn get_callee_saved_register(offset: isize) -> P<Value> {
debug_assert!(offset <= -8 && (-offset) % 8 == 0);
let id = ((offset/-8) - 1) as usize;
if id < CALLEE_SAVED_GPRs.len() {
CALLEE_SAVED_GPRs[id].clone()
} else if id - CALLEE_SAVED_GPRs.len() < CALLEE_SAVED_FPRs.len() {
CALLEE_SAVED_FPRs[id - CALLEE_SAVED_GPRs.len()].clone()
} else {
panic!("There is no callee saved register with id {}", offset)
}
}*/
pub fn is_callee_saved(reg_id: MuID) -> bool { pub fn is_callee_saved(reg_id: MuID) -> bool {
for reg in CALLEE_SAVED_GPRs.iter() { for reg in CALLEE_SAVED_GPRs.iter() {
......
...@@ -28,6 +28,8 @@ use utils::math; ...@@ -28,6 +28,8 @@ use utils::math;
use utils::POINTER_SIZE; use utils::POINTER_SIZE;
use std::collections::HashMap; use std::collections::HashMap;
use std::collections::LinkedList;
use std::any::Any; use std::any::Any;
lazy_static! { lazy_static! {
...@@ -108,10 +110,12 @@ pub struct InstructionSelection { ...@@ -108,10 +110,12 @@ pub struct InstructionSelection {
current_block: Option<MuName>, current_block: Option<MuName>,
current_block_in_ir: Option<MuName>, current_block_in_ir: Option<MuName>,
current_func_start: Option<ValueLocation>, current_func_start: Option<ValueLocation>,
// key: block id, val: callsite that names the block as exception block // Technically this is a map in that each Key is unique, but we will never try and add duplicate
current_exn_callsites: HashMap<MuID, Vec<ValueLocation>>, // keys, or look things up, so a list of pairs is faster than a Map.
// A list of pairs: the first element is the name of a callsite, the second is the id of its exception block (0 if there is none)
current_callsites: LinkedList<(MuName, MuID)>,
// key: block id, val: block location // key: block id, val: block location
current_exn_blocks: HashMap<MuID, ValueLocation>, current_exn_blocks: HashMap<MuID, MuName>,
current_constants: HashMap<MuID, P<Value>>, current_constants: HashMap<MuID, P<Value>>,
current_constants_locs: HashMap<MuID, P<Value>> current_constants_locs: HashMap<MuID, P<Value>>
...@@ -135,8 +139,7 @@ impl <'a> InstructionSelection { ...@@ -135,8 +139,7 @@ impl <'a> InstructionSelection {
// FIXME: ideally we should not create new blocks in instruction selection // FIXME: ideally we should not create new blocks in instruction selection
// see Issue #6 // see Issue #6
current_func_start: None, current_func_start: None,
// key: block id, val: callsite that names the block as exception block current_callsites: LinkedList::new(),
current_exn_callsites: HashMap::new(),
current_exn_blocks: HashMap::new(), current_exn_blocks: HashMap::new(),
current_constants: HashMap::new(), current_constants: HashMap::new(),
...@@ -3166,7 +3169,10 @@ impl <'a> InstructionSelection { ...@@ -3166,7 +3169,10 @@ impl <'a> InstructionSelection {
unimplemented!() unimplemented!()
} else { } else {
let callsite = self.new_callsite_label(cur_node); let callsite = self.new_callsite_label(cur_node);
self.backend.emit_call_near_rel32(callsite, func_name, None); // assume ccall wont throw exception self.backend.emit_call_near_rel32(callsite.clone(), func_name, None); // assume ccall wont throw exception
// TODO: What if there's an exception block?
self.current_callsites.push_back((callsite, 0));
// record exception block (CCall may have an exception block) // record exception block (CCall may have an exception block)
if cur_node.is_some() { if cur_node.is_some() {
...@@ -3348,20 +3354,15 @@ impl <'a> InstructionSelection { ...@@ -3348,20 +3354,15 @@ impl <'a> InstructionSelection {
let ref exn_dest = resumption.as_ref().unwrap().exn_dest; let ref exn_dest = resumption.as_ref().unwrap().exn_dest;
let target_block = exn_dest.target; let target_block = exn_dest.target;
if self.current_exn_callsites.contains_key(&target_block) { self.current_callsites.push_back((callsite.to_relocatable(), target_block));
let callsites = self.current_exn_callsites.get_mut(&target_block).unwrap();
callsites.push(callsite);
} else {
let mut callsites = vec![];
callsites.push(callsite);
self.current_exn_callsites.insert(target_block, callsites);
}
// insert an intermediate block to branch to normal // insert an intermediate block to branch to normal
// the branch is inserted later (because we need to deal with postcall convention) // the branch is inserted later (because we need to deal with postcall convention)
self.finish_block(); self.finish_block();
let fv_id = self.current_fv_id; let fv_id = self.current_fv_id;
self.start_block(format!("normal_cont_for_call_{}_{}", fv_id, cur_node.id())); self.start_block(format!("normal_cont_for_call_{}_{}", fv_id, cur_node.id()));
} else {
self.current_callsites.push_back((callsite.to_relocatable(), 0));
} }
// deal with ret vals, collapse stack etc. // deal with ret vals, collapse stack etc.
...@@ -4811,7 +4812,7 @@ impl CompilerPass for InstructionSelection { ...@@ -4811,7 +4812,7 @@ impl CompilerPass for InstructionSelection {
start_loc start_loc
}); });
self.current_callsite_id = 0; self.current_callsite_id = 0;
self.current_exn_callsites.clear(); self.current_callsites.clear();
self.current_exn_blocks.clear(); self.current_exn_blocks.clear();
self.current_constants.clear(); self.current_constants.clear();
...@@ -4843,7 +4844,7 @@ impl CompilerPass for InstructionSelection { ...@@ -4843,7 +4844,7 @@ impl CompilerPass for InstructionSelection {
// we need to be aware of exception blocks so that we can emit information to catch exceptions // we need to be aware of exception blocks so that we can emit information to catch exceptions
let loc = self.backend.start_exception_block(block_label.clone()); let loc = self.backend.start_exception_block(block_label.clone());
self.current_exn_blocks.insert(block.id(), loc); self.current_exn_blocks.insert(block.id(), loc.to_relocatable());
} else { } else {
// normal block // normal block
self.backend.start_block(block_label.clone()); self.backend.start_block(block_label.clone());
...@@ -4904,25 +4905,21 @@ impl CompilerPass for InstructionSelection { ...@@ -4904,25 +4905,21 @@ impl CompilerPass for InstructionSelection {
let (mc, func_end) = self.backend.finish_code(func_name.clone()); let (mc, func_end) = self.backend.finish_code(func_name.clone());
// insert exception branch info // insert exception branch info
let mut frame = match self.current_frame.take() { let frame = match self.current_frame.take() {
Some(frame) => frame, Some(frame) => frame,
None => panic!("no current_frame for function {} that is being compiled", func_name) None => panic!("no current_frame for function {} that is being compiled", func_name)
}; };
for block_id in self.current_exn_blocks.keys() { for &(ref callsite, block_id) in self.current_callsites.iter() {
let block_loc = match self.current_exn_blocks.get(&block_id) { let block_loc = if block_id == 0 {
Some(loc) => loc, String::new()
None => panic!("failed to find exception block {}", block_id) } else {
}; self.current_exn_blocks.get(&block_id).unwrap().clone()
let callsites = match self.current_exn_callsites.get(&block_id) {
Some(callsite) => callsite,
None => panic!("failed to find callsite for block {}", block_id)
}; };
for callsite in callsites { vm.add_exception_callsite(callsite.clone(), block_loc, self.current_fv_id);
frame.add_exception_callsite(callsite.clone(), block_loc.clone());
}
} }
let compiled_func = CompiledFunction::new(func.func_id, func.id(), mc, let compiled_func = CompiledFunction::new(func.func_id, func.id(), mc,
self.current_constants.clone(), self.current_constants_locs.clone(), self.current_constants.clone(), self.current_constants_locs.clone(),
frame, self.current_func_start.take().unwrap(), func_end); frame, self.current_func_start.take().unwrap(), func_end);
......
...@@ -13,6 +13,7 @@ pub use compiler::backend::x86_64::asm_backend::emit_context; ...@@ -13,6 +13,7 @@ pub use compiler::backend::x86_64::asm_backend::emit_context;
pub use compiler::backend::x86_64::asm_backend::emit_context_with_reloc; pub use compiler::backend::x86_64::asm_backend::emit_context_with_reloc;
#[cfg(feature = "aot")] #[cfg(feature = "aot")]
pub use compiler::backend::x86_64::asm_backend::spill_rewrite; pub use compiler::backend::x86_64::asm_backend::spill_rewrite;
use utils::Address;
use ast::ptr::P; use ast::ptr::P;
use ast::ir::*; use ast::ir::*;
...@@ -22,6 +23,9 @@ use compiler::backend::RegGroup; ...@@ -22,6 +23,9 @@ use compiler::backend::RegGroup;
use utils::LinkedHashMap; use utils::LinkedHashMap;
use std::collections::HashMap; use std::collections::HashMap;
// Number of normal callee saved registers (excluding RSP and RBP)
pub const CALLEE_SAVED_COUNT : usize = 5;
macro_rules! GPR_ALIAS { macro_rules! GPR_ALIAS {
($alias: ident: ($id64: expr, $r64: ident) -> $r32: ident, $r16: ident, $r8l: ident, $r8h: ident) => { ($alias: ident: ($id64: expr, $r64: ident) -> $r32: ident, $r16: ident, $r8l: ident, $r8h: ident) => {
lazy_static!{ lazy_static!{
...@@ -448,6 +452,47 @@ pub fn pick_group_for_reg(reg_id: MuID) -> RegGroup { ...@@ -448,6 +452,47 @@ pub fn pick_group_for_reg(reg_id: MuID) -> RegGroup {
RegGroup::get_from_value(reg) RegGroup::get_from_value(reg)
} }
// Gets the previous frame pointer with respect to the current
// (reads the saved RBP stored at offset 0 from `frame_pointer`).
#[inline(always)]
pub fn get_previous_frame_pointer(frame_pointer: Address) -> Address {
// SAFETY: caller must supply a valid frame pointer of a live frame,
// so the saved-FP slot it points at is readable.
unsafe { frame_pointer.load::<Address>() }
}
// Gets the return address for the current frame pointer
// (reads the word stored at offset 8 from `frame_pointer`).
#[inline(always)]
pub fn get_return_address(frame_pointer: Address) -> Address {
// SAFETY: caller must supply a valid frame pointer; the word at
// frame_pointer + 8 is the return address pushed by the `call`.
unsafe { frame_pointer.plus(8).load::<Address>() }
}
// Gets the stack pointer before the current frame was created
// (skips the 16-byte frame record: saved RBP at +0, return address at +8).
#[inline(always)]
pub fn get_previous_stack_pointer(frame_pointer: Address) -> Address {
frame_pointer.plus(16)
}
// Overwrites the saved frame pointer (the slot at offset 0) of the frame
// identified by `frame_pointer`.
#[inline(always)]
pub fn set_previous_frame_pointer(frame_pointer: Address, value: Address) {
// SAFETY: caller must supply a valid, writable frame pointer.
unsafe { frame_pointer.store::<Address>(value) }
}
// Sets the return address for the current frame pointer
// (overwrites the saved-return-address slot at offset 8).
#[inline(always)]
pub fn set_return_address(frame_pointer: Address, value: Address) {
// SAFETY: caller must supply a valid, writable frame pointer.
unsafe { frame_pointer.plus(8).store::<Address>(value) }
}
// Reg should be a 64-bit callee saved GPR (RBP itself is excluded —
// it is the frame pointer, not a spilled slot).
// Returns the frame-pointer-relative offset of the 8-byte spill slot for
// `reg`: RBX takes the first slot (FP - 8), then R12..R15 follow.
pub fn get_callee_saved_offset(reg: MuID) -> isize {
debug_assert!(is_callee_saved(reg) && reg != RBP.id());
// Index of `reg` within the callee-saved list (RBX = 0, R12.. = 1..).
// NOTE(review): dividing the id delta by 4 assumes each GPR's aliases
// (64/32/16/8-bit forms) consume 4 consecutive ids — TODO confirm
// against the GPR_ALIAS! definitions.
let id = if reg == RBX.id() {
0
} else {
(reg - R12.id())/4 + 1
};
// Slot `id` lives at FP - 8*(id + 1).
(id as isize + 1)*(-8)
}
pub fn is_callee_saved(reg_id: MuID) -> bool { pub fn is_callee_saved(reg_id: MuID) -> bool {
for reg in CALLEE_SAVED_GPRs.iter() { for reg in CALLEE_SAVED_GPRs.iter() {
if reg_id == reg.extract_ssa_id().unwrap() { if reg_id == reg.extract_ssa_id().unwrap() {
......
...@@ -47,6 +47,20 @@ pub use compiler::backend::x86_64::pick_group_for_reg; ...@@ -47,6 +47,20 @@ pub use compiler::backend::x86_64::pick_group_for_reg;
#[cfg(target_arch = "x86_64")] #[cfg(target_arch = "x86_64")]
pub use compiler::backend::x86_64::is_callee_saved; pub use compiler::backend::x86_64::is_callee_saved;
#[cfg(target_arch = "x86_64")] #[cfg(target_arch = "x86_64")]
pub use compiler::backend::x86_64::CALLEE_SAVED_COUNT ;
#[cfg(target_arch = "x86_64")]
pub use compiler::backend::x86_64::get_callee_saved_offset;
#[cfg(target_arch = "x86_64")]
pub use compiler::backend::x86_64::get_previous_frame_pointer;
#[cfg(target_arch = "x86_64")]
pub use compiler::backend::x86_64::get_return_address;
#[cfg(target_arch = "x86_64")]
pub use compiler::backend::x86_64::set_previous_frame_pointer;
#[cfg(target_arch = "x86_64")]
pub use compiler::backend::x86_64::set_return_address;
#[cfg(target_arch = "x86_64")]
pub use compiler::backend::x86_64::get_previous_stack_pointer;
#[cfg(target_arch = "x86_64")]
pub use compiler::backend::x86_64::emit_code; pub use compiler::backend::x86_64::emit_code;
#[cfg(target_arch = "x86_64")] #[cfg(target_arch = "x86_64")]
pub use compiler::backend::x86_64::emit_context; pub use compiler::backend::x86_64::emit_context;
...@@ -82,6 +96,20 @@ pub use compiler::backend::aarch64::pick_group_for_reg; ...@@ -82,6 +96,20 @@ pub use compiler::backend::aarch64::pick_group_for_reg;
#[cfg(target_arch = "aarch64")] #[cfg(target_arch = "aarch64")]
pub use compiler::backend::aarch64::is_callee_saved; pub use compiler::backend::aarch64::is_callee_saved;
#[cfg(target_arch = "aarch64")] #[cfg(target_arch = "aarch64")]
pub use compiler::backend::aarch64::CALLEE_SAVED_COUNT ;
#[cfg(target_arch = "aarch64")]
pub use compiler::backend::aarch64::get_callee_saved_offset;
#[cfg(target_arch = "aarch64")]
pub use compiler::backend::aarch64::get_previous_frame_pointer;
#[cfg(target_arch = "aarch64")]
pub use compiler::backend::aarch64::get_return_address;
#[cfg(target_arch = "aarch64")]
pub use compiler::backend::aarch64::get_previous_stack_pointer;
#[cfg(target_arch = "aarch64")]
pub use compiler::backend::aarch64::set_previous_frame_pointer;
#[cfg(target_arch = "aarch64")]
pub use compiler::backend::aarch64::set_return_address;
#[cfg(target_arch = "aarch64")]
pub use compiler::backend::aarch64::emit_code; pub use compiler::backend::aarch64::emit_code;
#[cfg(target_arch = "aarch64")] #[cfg(target_arch = "aarch64")]
pub use compiler::backend::aarch64::emit_context; pub use compiler::backend::aarch64::emit_context;
......
use ast::ir::*; use ast::ir::*;
use ast::ptr::*; use ast::ptr::*;
use ast::types::*; use ast::types::*;
use runtime::ValueLocation; use compiler::backend::get_callee_saved_offset;
use std::fmt; use std::fmt;
use std::collections::HashMap; use std::collections::HashMap;
...@@ -25,8 +25,9 @@ pub struct Frame { ...@@ -25,8 +25,9 @@ pub struct Frame {
pub argument_by_stack: HashMap<MuID, P<Value>>, pub argument_by_stack: HashMap<MuID, P<Value>>,
pub allocated: HashMap<MuID, FrameSlot>, pub allocated: HashMap<MuID, FrameSlot>,
// Maping from callee saved id (i.e. the position in the list of callee saved registers) and offset from the frame pointer
pub callee_saved: HashMap<isize, isize>,
// (callsite, destination address) // (callsite, destination address)
exception_callsites: Vec<(ValueLocation, ValueLocation)>
} }
impl fmt::Display for Frame { impl fmt::Display for Frame {
...@@ -37,9 +38,6 @@ impl fmt::Display for Frame { ...@@ -37,9 +38,6 @@ impl fmt::Display for Frame {
writeln!(f, " {}", slot).unwrap(); writeln!(f, " {}", slot).unwrap();
} }
writeln!(f, " exception callsites:").unwrap(); writeln!(f, " exception callsites:").unwrap();
for &(ref callsite, ref dest) in self.exception_callsites.iter() {
writeln!(f, " callsite: {} -> {}", callsite, dest).unwrap()
}
writeln!(f, " cur offset: {}", self.cur_offset).unwrap(); writeln!(f, " cur offset: {}", self.cur_offset).unwrap();
writeln!(f, "}}") writeln!(f, "}}")
} }
...@@ -52,9 +50,8 @@ impl Frame { ...@@ -52,9 +50,8 @@ impl Frame {