Commit 60328c8f authored by Isaac Oscar Gariano

Prioritized using caller saved registers over callee saved registers, fixed a bug when passing a constant int<128> to functions, improved saving of XR, and eliminated all warnings in the aarch64 backend.
parent e28cb012
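
The int<128> fix shows up in the new Constant::IntEx arms in the diff below, which split the constant with split_int128 and move each half with emit_mov_u64. As a rough standalone sketch of the same idea (illustrative only, not part of this commit), a 128-bit constant is split into its low and high 64-bit halves before each half is materialised in its own register:

// Standalone illustration (not backend code): split a 128-bit constant into
// the low/high 64-bit halves that are then moved into two registers.
fn split_u128(val: u128) -> (u64, u64) {
    let low = val as u64;          // bits 0..63
    let high = (val >> 64) as u64; // bits 64..127
    (low, high)
}

fn main() {
    let (lo, hi) = split_u128(0x0123_4567_89ab_cdef_fedc_ba98_7654_3210);
    assert_eq!(lo, 0xfedc_ba98_7654_3210);
    assert_eq!(hi, 0x0123_4567_89ab_cdef);
}
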
#![allow(dead_code)]
use compiler::backend::AOT_EMIT_CONTEXT_FILE;
use compiler::backend::RegGroup;
use utils::ByteSize;
......@@ -21,7 +19,6 @@ use ast::ir::*;
use std::str;
use std::usize;
use std::slice::Iter;
use std::ops;
use std::collections::HashSet;
......@@ -97,16 +94,6 @@ impl ASMCode {
panic!("didnt find any block for inst {}", inst)
}
fn get_block_by_start_inst(&self, inst: usize) -> Option<&ASMBlock> {
for block in self.blocks.values() {
if block.start_inst == inst {
return Some(block);
}
}
None
}
fn rewrite_insert(
&self,
insert_before: LinkedHashMap<usize, Vec<Box<ASMCode>>>,
......@@ -270,7 +257,6 @@ impl ASMCode {
// control flow analysis
let n_insts = self.number_of_insts();
let ref blocks = self.blocks;
let ref mut asm = self.code;
for i in 0..n_insts {
......@@ -886,21 +872,6 @@ impl ASMInst {
spill_info: spill_info
}
}
fn nop() -> ASMInst {
ASMInst {
code: "".to_string(),
defines: LinkedHashMap::new(),
uses: LinkedHashMap::new(),
is_symbol: false,
is_mem_op_used: false,
preds: vec![],
succs: vec![],
branch: ASMBranchTarget::None,
spill_info: None
}
}
}
#[derive(Clone, Debug, PartialEq, Eq)]
......@@ -983,11 +954,6 @@ impl ASMCodeGen {
self.cur().code.len()
}
fn add_asm_label(&mut self, code: String) {
trace!("emit: {}", code);
self.cur_mut().code.push(ASMInst::symbolic(code));
}
fn add_asm_block_label(&mut self, code: String, block_name: MuName) {
trace!("emit: [{}]{}", block_name, code);
self.cur_mut().code.push(ASMInst::symbolic(code));
......@@ -998,10 +964,6 @@ impl ASMCodeGen {
self.cur_mut().code.push(ASMInst::symbolic(code));
}
fn prepare_machine_regs(&self, regs: Iter<P<Value>>) -> Vec<MuID> {
regs.map(|x| self.prepare_machine_reg(x)).collect()
}
fn add_asm_call(&mut self, code: String, potentially_excepting: Option<MuName>, target: Option<(MuID, ASMLocation)>) {
// a call instruction will use all the argument registers
// do not need
......@@ -1045,22 +1007,6 @@ impl ASMCodeGen {
}, None)
}
fn add_asm_ret(&mut self, code: String) {
// return instruction does not use anything (not even RETURN REGS)
// otherwise it would keep RETURN REGS alive
// and if there is no actual move into RETURN REGS, it would keep RETURN REGS alive for a very long time
// and prevent anything else from using those registers
self.add_asm_inst_internal(code, linked_hashmap! {}, linked_hashmap! {}, false, ASMBranchTarget::Return, None);
}
fn add_asm_branch(&mut self, code: String, target: MuName) {
self.add_asm_inst_internal(code, linked_hashmap! {}, linked_hashmap! {}, false, ASMBranchTarget::Unconditional(target), None);
}
fn add_asm_branch2(&mut self, code: String, target: MuName) {
self.add_asm_inst_internal(code, linked_hashmap! {}, linked_hashmap! {}, false, ASMBranchTarget::Conditional(target), None);
}
fn add_asm_inst(
&mut self,
code: String,
......@@ -1101,7 +1047,6 @@ impl ASMCodeGen {
target: ASMBranchTarget,
spill_info: Option<SpillMemInfo>)
{
let line = self.line();
trace!("asm: {}", code);
trace!(" defines: {:?}", defines);
trace!(" uses: {:?}", uses);
......@@ -1124,30 +1069,6 @@ impl ASMCodeGen {
(str, op.extract_ssa_id().unwrap(), ASMLocation::new(self.line(), loc, len, check_op_len(&op.ty)))
}
fn prepare_fpreg(&self, op: &P<Value>, loc: usize) -> (String, MuID, ASMLocation) {
if cfg!(debug_assertions) {
match op.v {
Value_::SSAVar(_) => {},
_ => panic!("expecting register op")
}
}
let str = self.asm_reg_op(op);
let len = str.len();
(str, op.extract_ssa_id().unwrap(), ASMLocation::new(self.line(), loc, len, 64))
}
fn prepare_machine_reg(&self, op: &P<Value>) -> MuID {
if cfg!(debug_assertions) {
match op.v {
Value_::SSAVar(_) => {},
_ => panic!("expecting machine register op")
}
}
op.extract_ssa_id().unwrap()
}
fn prepare_mem(&self, op: &P<Value>, loc: usize) -> (String, LinkedHashMap<MuID, Vec<ASMLocation>>) {
if cfg!(debug_assertions) {
match op.v {
......@@ -1992,19 +1913,6 @@ impl ASMCodeGen {
fn emit_str_spill(&mut self, dest: Mem, src: Reg) { self.internal_store("STR", dest, src, true, false); }
}
// Only used for loads and stores
#[inline(always)]
fn op_postfix(op_len: usize) -> &'static str {
match op_len {
1 => "B",
8 => "B",
16 => "H",
32 => "",
64 => "",
_ => panic!("unexpected op size: {}", op_len)
}
}
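// Illustrative usage (not part of this commit): the postfix selects the sized
// AArch64 load/store mnemonic, e.g. format!("LDR{}", op_postfix(8)) yields "LDRB",
// op_postfix(16) yields "LDRH", and 32/64-bit operands use plain "LDR"/"STR".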
impl CodeGenerator for ASMCodeGen {
fn start_code(&mut self, func_name: MuName, entry: MuName) -> ValueLocation {
self.cur = Some(Box::new(ASMCode {
......@@ -3017,10 +2925,6 @@ fn directive_globl(name: String) -> String {
format!(".globl {}", name)
}
fn directive_comm(name: String, size: ByteSize, align: ByteSize) -> String {
format!(".comm {},{},{}", name, size, align)
}
use compiler::machine_code::CompiledFunction;
pub fn spill_rewrite(
......
#![allow(non_upper_case_globals)]
#![allow(dead_code)]
// TODO: Move architecture-independent code in here, inst_sel and asm_backend to somewhere else...
pub mod inst_sel;
......@@ -639,19 +638,6 @@ lazy_static! {
// put caller saved regs first (they impose no overhead if there is no call instruction)
pub static ref ALL_USABLE_MACHINE_REGs : Vec<P<Value>> = vec![
X19.clone(),
X20.clone(),
X21.clone(),
X22.clone(),
X23.clone(),
X24.clone(),
X25.clone(),
X26.clone(),
X27.clone(),
X28.clone(),
//X29.clone(), // Frame Pointer
//X30.clone(), // Link Register
X0.clone(),
X1.clone(),
X2.clone(),
......@@ -672,6 +658,19 @@ lazy_static! {
X17.clone(),
// X18.clone(), // Platform Register
X19.clone(),
X20.clone(),
X21.clone(),
X22.clone(),
X23.clone(),
X24.clone(),
X25.clone(),
X26.clone(),
X27.clone(),
X28.clone(),
//X29.clone(), // Frame Pointer
//X30.clone(), // Link Register
D8.clone(),
D9.clone(),
D10.clone(),
......@@ -1220,7 +1219,6 @@ pub fn node_type(op: &TreeNode) -> P<MuType> {
}
}
TreeNode_::Value(ref pv) => pv.ty.clone(),
_ => panic!("expected node value")
}
}
......@@ -1240,7 +1238,7 @@ pub fn match_value_int_imm(op: &P<Value>) -> bool {
pub fn match_node_value(op: &TreeNode) -> bool {
match op.v {
TreeNode_::Value(ref pv) => true,
TreeNode_::Value(_) => true,
_ => false
}
}
......@@ -1701,7 +1699,17 @@ fn emit_reg_value(backend: &mut CodeGenerator, pv: &P<Value>, f_context: &mut Fu
tmp
//}
},
&Constant::IntEx(ref val) => { unimplemented!() },
&Constant::IntEx(ref val) => {
assert!(val.len() == 2);
let tmp = make_temporary(f_context, pv.ty.clone(), vm);
let (tmp_l, tmp_h) = split_int128(&tmp, f_context, vm);
emit_mov_u64(backend, &tmp_l, val[0]);
emit_mov_u64(backend, &tmp_h, val[1]);
tmp
},
&Constant::FuncRef(func_id) => {
let tmp = make_temporary(f_context, pv.ty.clone(), vm);
......@@ -1751,7 +1759,17 @@ pub fn emit_ireg_value(backend: &mut CodeGenerator, pv: &P<Value>, f_context: &m
tmp
//}
},
&Constant::IntEx(ref val) => { unimplemented!() },
&Constant::IntEx(ref val) => {
assert!(val.len() == 2);
let tmp = make_temporary(f_context, pv.ty.clone(), vm);
let (tmp_l, tmp_h) = split_int128(&tmp, f_context, vm);
emit_mov_u64(backend, &tmp_l, val[0]);
emit_mov_u64(backend, &tmp_h, val[1]);
tmp
},
&Constant::FuncRef(func_id) => {
let tmp = make_temporary(f_context, pv.ty.clone(), vm);
......@@ -1877,7 +1895,7 @@ pub fn emit_mem(backend: &mut CodeGenerator, pv: &P<Value>, f_context: &mut Func
})
})
}
&MemoryLocation::Symbolic{ref label, is_global} => {
&MemoryLocation::Symbolic{is_global, ..} => {
if is_global {
let temp = make_temporary(f_context, pv.ty.clone(), vm);
emit_addr_sym(backend, &temp, &pv, vm);
......@@ -1975,7 +1993,7 @@ fn emit_mem_base(backend: &mut CodeGenerator, pv: &P<Value>, f_context: &mut Fun
base.clone()
}
}
&MemoryLocation::Symbolic{ref label, is_global} => {
&MemoryLocation::Symbolic{..} => {
let temp = make_temporary(f_context, pv.ty.clone(), vm);
emit_addr_sym(backend, &temp, &pv, vm);
temp
......@@ -2071,7 +2089,7 @@ fn emit_calculate_address(backend: &mut CodeGenerator, dest: &P<Value>, src: &P<
}
},
Value_::Memory(MemoryLocation::Symbolic{ref label, is_global}) => {
Value_::Memory(MemoryLocation::Symbolic{..}) => {
emit_addr_sym(backend, &dest, &src, vm);
}
_ => panic!("expect mem location as value")
......