Commit 18349852 authored by qinsoon's avatar qinsoon

[wip] going to emit code

parent 5f1f7500
......@@ -23,6 +23,8 @@ struct ASMCode {
reg_defines: HashMap<MuID, Vec<ASMLocation>>,
reg_uses: HashMap<MuID, Vec<ASMLocation>>,
mem_op_used: HashMap<usize, bool>,
preds: Vec<Vec<usize>>,
succs: Vec<Vec<usize>>,
......@@ -52,6 +54,10 @@ impl MachineCode for ASMCode {
}
}
// Whether the instruction at `index` was recorded as touching a memory
// operand (set by the code generator when the instruction is emitted).
// Panics if no entry exists for this instruction index.
fn is_using_mem_op(&self, index: usize) -> bool {
    self.mem_op_used[&index]
}
// Successor instruction indices of the instruction at `index`
// (control-flow edges used by liveness analysis).
fn get_succs(&self, index: usize) -> &Vec<usize> {
    let successors: &Vec<usize> = &self.succs[index];
    successors
}
......@@ -265,28 +271,28 @@ impl ASMCodeGen {
let mut defines : Vec<MuID> = self.prepare_machine_regs(x86_64::RETURN_GPRs.iter());
defines.append(&mut self.prepare_machine_regs(x86_64::RETURN_FPRs.iter()));
self.add_asm_inst(code, defines, vec![], uses, vec![]);
self.add_asm_inst(code, defines, vec![], uses, vec![], false);
}
/// Emits a `ret` instruction. A return implicitly reads the return-value
/// registers, so every GPR/FPR in the x86-64 return register sets is
/// recorded as a use of this instruction (needed for liveness analysis).
fn add_asm_ret(&mut self, code: String) {
    let mut uses : Vec<MuID> = self.prepare_machine_regs(x86_64::RETURN_GPRs.iter());
    uses.append(&mut self.prepare_machine_regs(x86_64::RETURN_FPRs.iter()));
    // `ret` defines nothing and has no memory operand (last arg: false).
    // NOTE(review): the stripped diff left both the old 5-arg and the new
    // 6-arg call here; `code` was moved twice. Only the updated call is kept.
    self.add_asm_inst(code, vec![], vec![], uses, vec![], false);
}
fn add_asm_branch(&mut self, code: String, target: &'static str) {
let l = self.line();
self.cur_mut().code.push(ASM::branch(code));
self.cur_mut().branches.insert(l, target);
self.add_asm_inst(code, vec![], vec![], vec![], vec![], false);
}
fn add_asm_branch2(&mut self, code: String, target: &'static str) {
let l = self.line();
self.cur_mut().code.push(ASM::branch(code));
self.cur_mut().cond_branches.insert(l, target);
self.add_asm_inst(code, vec![], vec![], vec![], vec![], false);
}
fn add_asm_inst(
......@@ -295,7 +301,8 @@ impl ASMCodeGen {
defines: Vec<MuID>,
mut define_locs: Vec<ASMLocation>,
uses: Vec<MuID>,
mut use_locs: Vec<ASMLocation>)
mut use_locs: Vec<ASMLocation>,
is_using_mem_op: bool)
{
let line = self.line();
......@@ -335,6 +342,7 @@ impl ASMCodeGen {
// put the instruction
mc.code.push(ASM::inst(code, defines, uses));
mc.mem_op_used.insert(line, is_using_mem_op);
}
fn define_reg(&mut self, reg: &P<Value>, loc: ASMLocation) {
......@@ -572,6 +580,8 @@ impl CodeGenerator for ASMCodeGen {
reg_defines: HashMap::new(),
reg_uses: HashMap::new(),
mem_op_used: HashMap::new(),
preds: vec![],
succs: vec![],
......@@ -687,7 +697,8 @@ impl CodeGenerator for ASMCodeGen {
vec![],
vec![],
vec![id1, id2],
vec![loc1, loc2]
vec![loc1, loc2],
false
);
}
......@@ -703,7 +714,8 @@ impl CodeGenerator for ASMCodeGen {
vec![],
vec![],
vec![id1],
vec![loc1]
vec![loc1],
false
)
}
......@@ -724,7 +736,8 @@ impl CodeGenerator for ASMCodeGen {
vec![id1],
vec![loc1],
vec![],
vec![]
vec![],
false
)
}
......@@ -741,7 +754,8 @@ impl CodeGenerator for ASMCodeGen {
vec![id2],
vec![loc2],
id1,
loc1
loc1,
true
)
}
......@@ -762,7 +776,8 @@ impl CodeGenerator for ASMCodeGen {
vec![], // not defining anything (write to memory)
vec![],
id2,
loc2
loc2,
true
)
}
......@@ -778,7 +793,8 @@ impl CodeGenerator for ASMCodeGen {
vec![],
vec![],
id,
loc
loc,
true
)
}
......@@ -795,7 +811,8 @@ impl CodeGenerator for ASMCodeGen {
vec![id2],
vec![loc2],
vec![id1],
vec![loc1]
vec![loc1],
false
)
}
......@@ -812,7 +829,8 @@ impl CodeGenerator for ASMCodeGen {
vec![id2],
vec![loc2.clone()],
vec![id1, id2],
vec![loc1, loc2]
vec![loc1, loc2],
false
)
}
......@@ -833,7 +851,8 @@ impl CodeGenerator for ASMCodeGen {
vec![id1],
vec![loc1.clone()],
vec![id1],
vec![loc1]
vec![loc1],
false
)
}
......@@ -850,7 +869,8 @@ impl CodeGenerator for ASMCodeGen {
vec![id2],
vec![loc2.clone()],
vec![id1, id2],
vec![loc1, loc2]
vec![loc1, loc2],
false
)
}
......@@ -871,7 +891,8 @@ impl CodeGenerator for ASMCodeGen {
vec![id1],
vec![loc1.clone()],
vec![id1],
vec![loc1]
vec![loc1],
false
)
}
......@@ -889,7 +910,8 @@ impl CodeGenerator for ASMCodeGen {
vec![rax, rdx],
vec![],
vec![id, rax],
vec![loc]
vec![loc],
false
)
}
......@@ -1015,7 +1037,8 @@ impl CodeGenerator for ASMCodeGen {
vec![rsp],
vec![],
vec![id, rsp],
vec![loc]
vec![loc],
false
)
}
......@@ -1032,7 +1055,8 @@ impl CodeGenerator for ASMCodeGen {
vec![id, rsp],
vec![loc.clone()],
vec![rsp],
vec![]
vec![],
false
)
}
}
......@@ -15,7 +15,7 @@ impl PeepholeOptimization {
}
pub fn remove_redundant_move(&mut self, inst: usize, cf: &mut CompiledFunction) {
if cf.mc.is_move(inst) {
if cf.mc.is_move(inst) && !cf.mc.is_using_mem_op(inst) {
cf.mc.trace_inst(inst);
let src : MuID = {
......
......@@ -307,18 +307,21 @@ pub fn build_chaitin_briggs (cf: &CompiledFunction, func: &MuFunctionVersion) ->
let src = cf.mc.get_inst_reg_uses(i);
let dst = cf.mc.get_inst_reg_defines(i);
// src may be an immediate number
// but dest is a register or a memory location
debug_assert!(dst.len() == 1);
if src.len() == 1 {
let node1 = ig.get_node(src[0]);
let node2 = ig.get_node(dst[0]);
ig.add_move(node1, node2);
Some(src[0])
} else {
// src: reg/imm/mem
// dest: reg/mem
// we dont care if src/dest is mem
if cf.mc.is_using_mem_op(i) {
None
} else {
if src.len() == 1 {
let node1 = ig.get_node(src[0]);
let node2 = ig.get_node(dst[0]);
ig.add_move(node1, node2);
Some(src[0])
} else {
None
}
}
} else {
None
......
......@@ -17,6 +17,8 @@ pub trait MachineCode {
fn number_of_insts(&self) -> usize;
fn is_move(&self, index: usize) -> bool;
fn is_using_mem_op(&self, index: usize) -> bool;
fn get_succs(&self, index: usize) -> &Vec<usize>;
fn get_preds(&self, index: usize) -> &Vec<usize>;
......
......@@ -341,7 +341,7 @@ pub fn global_access() -> VMContext {
let global_a = vm.declare_global("a", type_def_int64.clone());
// .funcsig @global_access_sig = () -> ()
let func_sig = vm.declare_func_sig("global_access_sig", vec![], vec![]);
let func_sig = vm.declare_func_sig("global_access_sig", vec![type_def_int64.clone()], vec![]);
// .funcdecl @global_access <@global_access_sig>
let func = MuFunction::new("global_access", func_sig.clone());
......@@ -353,22 +353,10 @@ pub fn global_access() -> VMContext {
// %blk_0():
let mut blk_0 = Block::new("blk_0");
// %x = LOAD <@int_64> @a
let blk_0_x = func_ver.new_ssa("blk_0_x", type_def_int64.clone()).clone_value();
let blk_0_a = func_ver.new_global(global_a.clone());
let blk_0_inst0 = func_ver.new_inst(Instruction{
value: Some(vec![blk_0_x]),
ops: RefCell::new(vec![blk_0_a.clone()]),
v: Instruction_::Load{
is_ptr: false,
order: MemoryOrder::Relaxed,
mem_loc: 0
}
});
// STORE <@int_64> @a @int_64_1
let blk_0_a = func_ver.new_global(global_a.clone());
let blk_0_const_int64_1 = func_ver.new_constant(const_def_int64_1.clone());
let blk_0_inst1 = func_ver.new_inst(Instruction{
let blk_0_inst0 = func_ver.new_inst(Instruction{
value: None,
ops: RefCell::new(vec![blk_0_a.clone(), blk_0_const_int64_1.clone()]),
v: Instruction_::Store{
......@@ -378,11 +366,23 @@ pub fn global_access() -> VMContext {
value: 1
}
});
// %x = LOAD <@int_64> @a
let blk_0_x = func_ver.new_ssa("blk_0_x", type_def_int64.clone());
let blk_0_inst1 = func_ver.new_inst(Instruction{
value: Some(vec![blk_0_x.clone_value()]),
ops: RefCell::new(vec![blk_0_a.clone()]),
v: Instruction_::Load{
is_ptr: false,
order: MemoryOrder::Relaxed,
mem_loc: 0
}
});
let blk_0_term = func_ver.new_inst(Instruction{
value: None,
ops: RefCell::new(vec![]),
v: Instruction_::Return(vec![])
ops: RefCell::new(vec![blk_0_x.clone()]),
v: Instruction_::Return(vec![0])
});
let blk_0_content = BlockContent {
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment