Commit 0bf63ab8 authored by Yi Lin's avatar Yi Lin

Merge branch 'develop' into 'master'

merge with develop: fix bugs around exception handling, improve debugging output; all current tests pass

See merge request !4
parents 29a68241 498338a5
<?xml version="1.0" encoding="UTF-8"?>
<module type="RUST_MODULE" version="4">
<component name="FacetManager">
<facet type="Python" name="Python">
<configuration sdkName="" />
</facet>
</component>
<component name="NewModuleRootManager" inherit-compiler-output="true">
<exclude-output />
<content url="file://$MODULE_DIR$">
......
......@@ -2,6 +2,7 @@ use ir::*;
use ptr::*;
use types::*;
use op::*;
use ir_semantics;
use utils::vec_utils;
......@@ -68,6 +69,63 @@ impl Clone for Instruction {
}
impl Instruction {
/// Whether this instruction form may carry an exception clause.
/// Delegates to `ir_semantics::is_potentially_excepting_instruction`, which
/// classifies the instruction variant (`self.v`).
pub fn has_exception_clause(&self) -> bool {
ir_semantics::is_potentially_excepting_instruction(&self.v)
}
/// Returns the `MuID` of the exceptional destination block for instructions
/// that carry a resumption clause (`resume.exn_dest.target`), or `None` for
/// every other instruction variant.
///
/// The `None` arm deliberately lists every remaining variant instead of using
/// a `_` catch-all, so that adding a new `Instruction_` variant forces this
/// match to be revisited at compile time.
pub fn get_exception_target(&self) -> Option<MuID> {
use inst::Instruction_::*;
match self.v {
// these five forms carry a ResumptionData with an exception destination
Watchpoint {ref resume, ..}
| Call {ref resume, ..}
| CCall {ref resume, ..}
| SwapStack {ref resume, ..}
| ExnInstruction {ref resume, ..} => {
Some(resume.exn_dest.target)
},
// everything else has no exception target
BinOp(_, _, _)
| BinOpWithStatus(_, _, _, _)
| CmpOp(_, _, _)
| ConvOp{..}
| ExprCall{..}
| ExprCCall{..}
| Load{..}
| Store{..}
| CmpXchg{..}
| AtomicRMW{..}
| New(_)
| AllocA(_)
| NewHybrid(_, _)
| AllocAHybrid(_, _)
| NewStack(_)
| NewThread(_, _)
| NewThreadExn(_, _)
| NewFrameCursor(_)
| GetIRef(_)
| GetFieldIRef{..}
| GetElementIRef{..}
| ShiftIRef{..}
| GetVarPartIRef{..}
| Fence(_)
| Return(_)
| ThreadExit
| Throw(_)
| TailCall(_)
| Branch1(_)
| Branch2{..}
| Select{..}
| WPBranch{..}
| Switch{..}
| CommonInst_GetThreadLocal
| CommonInst_SetThreadLocal(_)
| CommonInst_Pin(_)
| CommonInst_Unpin(_)
| Move(_)
| PrintHex(_) => None
}
}
/// Human-readable rendering of this instruction, resolving operand indices
/// against `ops`. Delegates to the variant's own `debug_str`.
fn debug_str(&self, ops: &Vec<P<TreeNode>>) -> String {
self.v.debug_str(ops)
}
......@@ -490,7 +548,11 @@ pub struct CallData {
impl CallData {
    /// Debug rendering of a call: calling convention, callee name, and
    /// bracketed argument list.
    fn debug_str(&self, ops: &Vec<P<TreeNode>>) -> String {
        // Use the callee's name when it has one; otherwise a readable placeholder.
        let func_name = match ops[self.func].name() {
            Some(name) => name,
            None => "Anonymous Function".to_string()
        };
        format!("{:?} {} [{}]", self.convention, func_name, op_vector_str(&self.args, ops))
    }
}
......
......@@ -5,6 +5,7 @@ use op::*;
use utils::vec_utils;
use utils::LinkedHashMap;
use utils::LinkedHashSet;
use std::fmt;
use std::default;
......@@ -134,7 +135,7 @@ impl fmt::Debug for MuFunctionVersion {
write!(f, "Empty\n").unwrap();
}
if self.block_trace.is_some() {
write!(f, "{:?}\n", self.block_trace.as_ref().unwrap())
write!(f, "Block Trace: {:?}\n", self.block_trace.as_ref().unwrap())
} else {
write!(f, "Trace not available\n")
}
......@@ -300,10 +301,13 @@ impl MuFunctionVersion {
}
}
#[derive(RustcEncodable, RustcDecodable)]
#[derive(Clone, RustcEncodable, RustcDecodable)]
pub struct FunctionContent {
pub entry: MuID,
pub blocks: LinkedHashMap<MuID, Block>
pub blocks: LinkedHashMap<MuID, Block>,
// this field only valid after control flow analysis
pub exception_blocks: LinkedHashSet<MuID>
}
impl fmt::Debug for FunctionContent {
......@@ -321,22 +325,15 @@ impl fmt::Debug for FunctionContent {
}
}
impl Clone for FunctionContent {
fn clone(&self) -> Self {
let mut new_blocks = LinkedHashMap::new();
for (id, block) in self.blocks.iter() {
new_blocks.insert(*id, block.clone());
}
impl FunctionContent {
pub fn new(entry: MuID, blocks: LinkedHashMap<MuID, Block>) -> FunctionContent {
FunctionContent {
entry: self.entry,
blocks: new_blocks
entry: entry,
blocks: blocks,
exception_blocks: LinkedHashSet::new()
}
}
}
impl FunctionContent {
pub fn get_entry_block(&self) -> &Block {
self.get_block(self.entry)
}
......@@ -432,7 +429,7 @@ impl Block {
Block{hdr: MuEntityHeader::unnamed(id), content: None, control_flow: ControlFlow::default()}
}
pub fn is_exception_block(&self) -> bool {
pub fn is_receiving_exception_arg(&self) -> bool {
return self.content.as_ref().unwrap().exn_arg.is_some()
}
......@@ -640,7 +637,6 @@ impl TreeNode {
match self.v {
TreeNode_::Value(ref val) => val.clone(),
TreeNode_::Instruction(ref inst) => {
warn!("expecting a value, but we found an inst. Instead we use its first value");
let vals = inst.value.as_ref().unwrap();
if vals.len() != 1 {
panic!("we expect an inst with 1 value, but found multiple or zero (it should not be here - folded as a child)");
......@@ -768,6 +764,7 @@ impl Value {
}
const DISPLAY_TYPE : bool = false;
const PRINT_ABBREVIATE_NAME: bool = true;
impl fmt::Debug for Value {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
......@@ -890,6 +887,9 @@ impl SSAVarEntry {
/// Atomically increments this SSA variable's use count.
pub fn increase_use_count(&self) {
self.use_count.fetch_add(1, Ordering::SeqCst);
}
/// Atomically resets this SSA variable's use count to zero
/// (presumably so a later pass can recount uses — confirm with callers).
pub fn reset_use_count(&self) {
self.use_count.store(0, Ordering::SeqCst);
}
pub fn has_expr(&self) -> bool {
self.expr.is_some()
......@@ -931,7 +931,7 @@ impl fmt::Display for Constant {
&Constant::Float(v) => write!(f, "{}", v),
&Constant::Double(v) => write!(f, "{}", v),
// &Constant::IRef(v) => write!(f, "{}", v),
&Constant::FuncRef(v) => write!(f, "{}", v),
&Constant::FuncRef(v) => write!(f, "FuncRef {}", v),
&Constant::Vector(ref v) => {
write!(f, "[").unwrap();
for i in 0..v.len() {
......@@ -1071,6 +1071,8 @@ impl MuEntityHeader {
}
pub fn name_check(name: MuName) -> MuName {
let name = name.replace('.', "$");
if name.starts_with("@") || name.starts_with("%") {
let (_, name) = name.split_at(1);
......@@ -1079,6 +1081,29 @@ impl MuEntityHeader {
name
}
/// Shortens a `$`-separated name for display: every component except the last
/// is reduced to its first character (or `'_'` when empty), joined with `'.'`.
/// E.g. `"foo$bar$baz"` becomes `"f.b.baz"`. Returns `None` for unnamed entities.
fn abbreviate_name(&self) -> Option<MuName> {
    self.name().map(|name| {
        let parts: Vec<&str> = name.split('$').collect();
        let mut abbr = String::new();
        // split() always yields at least one part, so len() - 1 cannot underflow
        for part in &parts[..parts.len() - 1] {
            abbr.push(part.chars().next().unwrap_or('_'));
            abbr.push('.');
        }
        abbr.push_str(parts.last().unwrap());
        abbr
    })
}
}
impl PartialEq for MuEntityHeader {
......@@ -1087,21 +1112,13 @@ impl PartialEq for MuEntityHeader {
}
}
const PRINT_ABBREVIATE_NAME: bool = false;
impl fmt::Display for MuEntityHeader {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
if self.name().is_none() {
write!(f, "UNNAMED #{}", self.id)
} else {
if PRINT_ABBREVIATE_NAME {
let name = self.name().unwrap().clone();
let abbr_name = name.split('.').map(
|x| match x.chars().next() {
Some(c) => c,
None => '_'
}).fold("".to_string(), |mut acc, x| {acc.push(x); acc});
write!(f, "{} #{}", abbr_name, self.id)
write!(f, "{} #{}", self.abbreviate_name().unwrap(), self.id)
} else {
write!(f, "{} #{}", self.name().unwrap(), self.id)
}
......
......@@ -103,3 +103,53 @@ pub fn has_side_effect(inst: &Instruction_) -> bool {
&PrintHex(_) => true
}
}
/// Whether this instruction form may raise a Mu exception and therefore may
/// carry an exception clause: Watchpoint / Call / CCall / SwapStack /
/// ExnInstruction. Every other variant is listed explicitly — no `_` arm —
/// so a newly added `Instruction_` variant forces this function to be updated.
pub fn is_potentially_excepting_instruction(inst: &Instruction_) -> bool {
match inst {
&Watchpoint{..}
| &Call{..}
| &CCall{..}
| &SwapStack{..}
| &ExnInstruction{..} => true,
&BinOp(_, _, _)
| &BinOpWithStatus(_, _, _, _)
| &CmpOp(_, _, _)
| &ConvOp{..}
| &ExprCall{..}
| &ExprCCall{..}
| &Load{..}
| &Store{..}
| &CmpXchg{..}
| &AtomicRMW{..}
| &New(_)
| &AllocA(_)
| &NewHybrid(_, _)
| &AllocAHybrid(_, _)
| &NewStack(_)
| &NewThread(_, _)
| &NewThreadExn(_, _)
| &NewFrameCursor(_)
| &GetIRef(_)
| &GetFieldIRef{..}
| &GetElementIRef{..}
| &ShiftIRef{..}
| &GetVarPartIRef{..}
| &Fence(_)
| &Return(_)
| &ThreadExit
| &Throw(_)
| &TailCall(_)
| &Branch1(_)
| &Branch2{..}
| &Select{..}
| &WPBranch{..}
| &Switch{..}
| &CommonInst_GetThreadLocal
| &CommonInst_SetThreadLocal(_)
| &CommonInst_Pin(_)
| &CommonInst_Unpin(_)
| &Move(_)
| &PrintHex(_) => false
}
}
\ No newline at end of file
......@@ -376,6 +376,35 @@ impl ASMCode {
panic!("conditional branch does not have a fallthrough target");
}
},
ASMBranchTarget::PotentiallyExcepting(ref target) => {
// may trigger exception and jump to target - similar as conditional branch
let target_n = self.blocks.get(target).unwrap().start_inst;
// cur inst's succ is target
asm[i].succs.push(target_n);
if TRACE_CFA {
trace!("inst {}: is potentially excepting to {}", i, target);
trace!("inst {}: excepting target index is {}", i, target_n);
trace!("inst {}: set SUCCS as excepting target {}", i, target_n);
}
asm[target_n].preds.push(i);
if let Some(next_inst) = ASMCode::find_next_inst(i, asm) {
// cur succ is next inst
asm[i].succs.push(next_inst);
// next inst's pred is cur
asm[next_inst].preds.push(i);
if TRACE_CFA {
trace!("inst {}: SET SUCCS as PEI fallthrough target {}", i, next_inst);
}
} else {
panic!("PEI does not have a fallthrough target");
}
},
ASMBranchTarget::Return => {
if TRACE_CFA {
trace!("inst {}: is a return", i);
......@@ -436,6 +465,25 @@ impl ASMCode {
}
}
/// Walks backwards from index `i` (inclusive) to the nearest entry that is a
/// real instruction rather than a symbolic line (label/directive).
/// NOTE(review): `i == 0` returns `None` without inspecting `asm[0]` — this
/// assumes index 0 is never a plain instruction; confirm with callers.
fn find_last_inst(i: usize, asm: &Vec<ASMInst>) -> Option<usize> {
    if i == 0 {
        return None;
    }
    let mut cur = i;
    loop {
        if !asm[cur].is_symbol {
            return Some(cur);
        }
        if cur == 0 {
            return None;
        }
        cur -= 1;
    }
}
fn add_frame_size_patchpoint(&mut self, patchpoint: ASMLocation) {
self.frame_size_patchpoints.push(patchpoint);
}
......@@ -602,8 +650,9 @@ impl MachineCode for ASMCode {
}
fn set_inst_nop(&mut self, index: usize) {
self.code.remove(index);
self.code.insert(index, ASMInst::nop());
self.code[index].code.clear();
// self.code.remove(index);
// self.code.insert(index, ASMInst::nop());
}
fn remove_unnecessary_callee_saved(&mut self, used_callee_saved: Vec<MuID>) -> Vec<MuID> {
......@@ -656,15 +705,8 @@ impl MachineCode for ASMCode {
regs_to_remove
}
#[allow(unused_variables)]
fn patch_frame_size(&mut self, size: usize, size_used: usize) {
// calling convention requires stack pointer to be 16 bytes aligned before a call
// we make frame size a multipl of 16 bytes
let size = if (size + size_used) % 16 == 0 {
size
} else {
( (size + size_used) / 16 + 1) * 16 - size_used
};
let size = size.to_string();
debug_assert!(size.len() <= FRAME_SIZE_PLACEHOLDER_LEN);
......@@ -680,12 +722,30 @@ impl MachineCode for ASMCode {
let mut ret = vec![];
for inst in self.code.iter() {
if !inst.is_symbol {
ret.append(&mut "\t".to_string().into_bytes());
}
ret.append(&mut inst.code.clone().into_bytes());
ret.append(&mut "\n".to_string().into_bytes());
}
ret
}
/// Renders the single instruction at `index` as bytes. Non-symbol lines are
/// prefixed with a tab; symbolic lines (labels/directives) are not.
fn emit_inst(&self, index: usize) -> Vec<u8> {
    let inst = &self.code[index];
    let mut bytes = Vec::new();
    if !inst.is_symbol {
        bytes.extend_from_slice(b"\t");
    }
    bytes.extend_from_slice(inst.code.as_bytes());
    bytes
}
fn trace_mc(&self) {
trace!("");
......@@ -744,6 +804,24 @@ impl MachineCode for ASMCode {
None => None
}
}
/// Name of the block whose half-open range [start_inst, end_inst) contains
/// `index`, or `None` when the index falls outside every block.
fn get_block_for_inst(&self, index: usize) -> Option<MuName> {
    self.blocks
        .iter()
        .find(|&(_, block)| block.start_inst <= index && index < block.end_inst)
        .map(|(name, _)| name.clone())
}
/// Index of the next real (non-symbol) instruction after `index`, if any.
fn get_next_inst(&self, index: usize) -> Option<usize> {
ASMCode::find_next_inst(index, &self.code)
}
/// Index of the nearest real (non-symbol) instruction at or before `index`
/// (see `ASMCode::find_last_inst` for the edge cases).
fn get_last_inst(&self, index: usize) -> Option<usize> {
ASMCode::find_last_inst(index, &self.code)
}
}
#[derive(Clone, Debug)]
......@@ -751,6 +829,7 @@ enum ASMBranchTarget {
None,
Conditional(MuName),
Unconditional(MuName),
PotentiallyExcepting(MuName),
Return
}
......@@ -930,7 +1009,7 @@ impl ASMCodeGen {
regs.map(|x| self.prepare_machine_reg(x)).collect()
}
fn add_asm_call(&mut self, code: String) {
fn add_asm_call(&mut self, code: String, potentially_excepting: Option<MuName>) {
// a call instruction will use all the argument registers
// do not need
let uses : LinkedHashMap<MuID, Vec<ASMLocation>> = LinkedHashMap::new();
......@@ -960,7 +1039,13 @@ impl ASMCodeGen {
}
}
self.add_asm_inst(code, defines, uses, false);
self.add_asm_inst_internal(code, defines, uses, false, {
if potentially_excepting.is_some() {
ASMBranchTarget::PotentiallyExcepting(potentially_excepting.unwrap())
} else {
ASMBranchTarget::None
}
}, None)
}
fn add_asm_ret(&mut self, code: String) {
......@@ -1930,6 +2015,25 @@ impl CodeGenerator for ASMCodeGen {
}
}
/// Emits the `.cfi_startproc` directive opening a function's call-frame info.
fn add_cfi_startproc(&mut self) {
self.add_asm_symbolic(".cfi_startproc".to_string());
}
/// Emits the `.cfi_endproc` directive closing a function's call-frame info.
fn add_cfi_endproc(&mut self) {
self.add_asm_symbolic(".cfi_endproc".to_string());
}
/// Emits `.cfi_def_cfa_register <reg>`: the CFA is now computed from `reg`.
fn add_cfi_def_cfa_register(&mut self, reg: Reg) {
let reg = self.asm_reg_op(reg);
self.add_asm_symbolic(format!(".cfi_def_cfa_register {}", reg));
}
/// Emits `.cfi_def_cfa_offset <offset>`: new offset of the CFA from its register.
fn add_cfi_def_cfa_offset(&mut self, offset: i32) {
self.add_asm_symbolic(format!(".cfi_def_cfa_offset {}", offset));
}
/// Emits `.cfi_offset <reg>, <offset>`: records that `reg` was saved at
/// `offset` from the CFA.
fn add_cfi_offset(&mut self, reg: Reg, offset: i32) {
let reg = self.asm_reg_op(reg);
self.add_asm_symbolic(format!(".cfi_offset {}, {}", reg, offset));
}
fn emit_frame_grow(&mut self) {
trace!("emit frame grow");
......@@ -2703,11 +2807,11 @@ impl CodeGenerator for ASMCodeGen {
}
#[cfg(target_os = "macos")]
fn emit_call_near_rel32(&mut self, callsite: String, func: MuName) -> ValueLocation {
fn emit_call_near_rel32(&mut self, callsite: String, func: MuName, pe: Option<MuName>) -> ValueLocation {
trace!("emit: call {}", func);
let asm = format!("call {}", symbol(func));
self.add_asm_call(asm);
self.add_asm_call(asm, pe);
let callsite_symbol = symbol(callsite.clone());
self.add_asm_symbolic(directive_globl(callsite_symbol.clone()));
......@@ -2718,13 +2822,13 @@ impl CodeGenerator for ASMCodeGen {
#[cfg(target_os = "linux")]
// generating Position-Independent Code using PLT
fn emit_call_near_rel32(&mut self, callsite: String, func: MuName) -> ValueLocation {
fn emit_call_near_rel32(&mut self, callsite: String, func: MuName, pe: Option<MuName>) -> ValueLocation {
trace!("emit: call {}", func);
let func = func + "@PLT";
let asm = format!("call {}", symbol(func));
self.add_asm_call(asm);
self.add_asm_call(asm, pe);
let callsite_symbol = symbol(callsite.clone());
self.add_asm_symbolic(directive_globl(callsite_symbol.clone()));
......@@ -2733,12 +2837,12 @@ impl CodeGenerator for ASMCodeGen {
ValueLocation::Relocatable(RegGroup::GPR, callsite)
}
fn emit_call_near_r64(&mut self, callsite: String, func: &P<Value>) -> ValueLocation {
fn emit_call_near_r64(&mut self, callsite: String, func: &P<Value>, pe: Option<MuName>) -> ValueLocation {
trace!("emit: call {}", func);
unimplemented!()
}
fn emit_call_near_mem64(&mut self, callsite: String, func: &P<Value>) -> ValueLocation {
fn emit_call_near_mem64(&mut self, callsite: String, func: &P<Value>, pe: Option<MuName>) -> ValueLocation {
trace!("emit: call {}", func);
unimplemented!()
}
......@@ -3057,14 +3161,7 @@ impl CodeGenerator for ASMCodeGen {
}
}
/// Ensures the AOT emit directory exists. Errors from `create_dir` are
/// deliberately ignored — typically the directory already exists.
/// NOTE(review): a function of the same name is imported from
/// `compiler::backend::code_emission` just below; this local copy looks like
/// a merge leftover — confirm which definition should remain.
fn create_emit_directory(vm: &VM) {
use std::fs;
match fs::create_dir(&vm.vm_options.flag_aot_emit_dir) {
Ok(_) => {},
Err(_) => {}
}
}
use compiler::backend::code_emission::create_emit_directory;
use std::fs::File;
pub fn emit_code(fv: &mut MuFunctionVersion, vm: &VM) {
......
......@@ -21,6 +21,14 @@ pub trait CodeGenerator {
fn set_block_liveout(&mut self, block_name: MuName, live_out: &Vec<P<Value>>);
fn end_block(&mut self, block_name: MuName);
// add CFI info
fn add_cfi_startproc(&mut self);
fn add_cfi_endproc(&mut self);
fn add_cfi_def_cfa_register(&mut self, reg: Reg);
fn add_cfi_def_cfa_offset(&mut self, offset: i32);
fn add_cfi_offset(&mut self, reg: Reg, offset: i32);
// emit code to adjust frame
fn emit_frame_grow(&mut self);
fn emit_frame_shrink(&mut self);
......@@ -163,9 +171,9 @@ pub trait CodeGenerator {
fn emit_jl(&mut self, dest: MuName);
fn emit_jle(&mut self, dest: MuName);
fn emit_call_near_rel32(&mut self, callsite: String, func: MuName) -> ValueLocation;
fn emit_call_near_r64(&mut self, callsite: String, func: &P<Value>) -> ValueLocation;
fn emit_call_near_mem64(&mut self, callsite: String, func: &P<Value>) -> ValueLocation;
fn emit_call_near_rel32(&mut self, callsite: String, func: MuName, pe: Option<MuName>) -> ValueLocation;
fn emit_call_near_r64 (&mut self, callsite: String, func: &P<Value>, pe: Option<MuName>) -> ValueLocation;
fn emit_call_near_mem64(&mut self, callsite: String, func: &P<Value>, pe: Option<MuName>) -> ValueLocation;
fn emit_ret(&mut self);
......
......@@ -81,7 +81,7 @@ macro_rules! FPR {
// Register aliases: (id, 64-bit register) -> 32/16/8-bit views.
// (A duplicated RDX_ALIAS invocation left over from the merge has been
// removed — declaring it twice would define the alias twice.)
GPR_ALIAS!(RAX_ALIAS: (0, RAX) -> EAX, AX , AL, AH);
GPR_ALIAS!(RCX_ALIAS: (5, RCX) -> ECX, CX , CL, CH);
GPR_ALIAS!(RDX_ALIAS: (10,RDX) -> EDX, DX , DL, DH);
GPR_ALIAS!(RBX_ALIAS: (15,RBX) -> EBX, BX , BL, BH);
GPR_ALIAS!(RSP_ALIAS: (20,RSP) -> ESP, SP , SPL);
GPR_ALIAS!(RBP_ALIAS: (24,RBP) -> EBP, BP , BPL);
......
This diff is collapsed.
......@@ -646,7 +646,7 @@ impl <'a> GraphColoring<'a> {
trace!("Color {} as {}", self.display_node(n), first_available_color);
if !backend::is_callee_saved(first_available_color) {
warn!("Use caller saved register {}", first_available_color);
trace!("Use caller saved register {}", first_available_color);
}
self.colored_nodes.push(n);
......
......@@ -271,9 +271,15 @@ fn local_liveness_analysis (cf: &mut CompiledFunction) -> LinkedHashMap<String,
None => panic!("cannot find range for block {}", block)
};
trace!("Block {}: start_inst={}, end_inst(inclusive)={}", block, range.start, range.end-1);
start_inst_map.insert(range.start, block);
end_inst_map.insert(range.end - 1, block);
let first_inst = range.start;
let last_inst = match mc.get_last_inst(range.end) {
Some(last) => last,
None => panic!("cannot find last instruction in block {}, this block contains no instruction?", block)
};
trace!("Block {}: start_inst={}, end_inst(inclusive)={}", block, first_inst, last_inst);
start_inst_map.insert(first_inst, block);
end_inst_map.insert(last_inst, block);
}
// local liveness analysis
......@@ -324,7 +330,7 @@ fn local_liveness_analysis (cf: &mut CompiledFunction) -> LinkedHashMap<String,
let succs : Vec<String> = {
let mut ret = vec![];
for succ in mc.get_succs(end - 1).into_iter() {
for succ in mc.get_succs(mc.get_last_inst(end).unwrap()).into_iter() {
match start_inst_map.get(succ) {
Some(str) => ret.push(String::from(*str)),
None => {}
......
......@@ -128,7 +128,7 @@ pub fn validate_regalloc(cf: &CompiledFunction,
}
debug!("{}", alive);
trace!("---");
debug!("---");
}
// find liveout of the block, and only preserve what is in the liveout
......@@ -314,7 +314,7 @@ fn add_def(reg: MuID, reg_assigned: &LinkedHashMap<MuID, MuID>, is_mov: bool, al
// overwrite value, safe
} else {
if is_mov {
warn!("Temp{} and Temp{} is using the same Register{}, possibly coalesced", temp, old_temp, machine_reg);
debug!("Temp{} and Temp{} is using the same Register{}, possibly coalesced", temp, old_temp, machine_reg);
} else {
// trying to overwrite another value, error
error!("Temp{} and Temp{} try use the same Register{}", temp, old_temp, machine_reg);
......
......@@ -20,7 +20,7 @@ use vm::VM;
#[derive(RustcEncodable, RustcDecodable, Clone)]
pub struct Frame {
func_ver_id: MuID,
cur_offset: isize, // offset to rbp
cur_offset: isize, // offset to frame base pointer
pub argument_by_reg: HashMap<MuID, P<Value>>,
pub argument_by_stack: HashMap<MuID, P<Value>>,
......@@ -41,6 +41,7 @@ impl fmt::Display for Frame {
for &(ref callsite, ref dest) in self.exception_callsites.iter() {
writeln!(f, " callsite: {} -> {}", callsite, dest).unwrap()
}
writeln!(f, " cur offset: {}", self.cur_offset).unwrap();
writeln!(f, "}}")
}
}
......@@ -58,13 +59,18 @@ impl Frame {
exception_callsites: vec![]
}
}
pub fn cur_offset(&self) -> isize {
self.cur_offset
}</