use ast::ir::*;
use ast::ptr::*;
use ast::inst::Instruction;
use ast::inst::Destination;
use ast::inst::DestArg;
use ast::inst::Instruction_;
use ast::inst::MemoryOrder;
use ast::op;
use ast::types;
use ast::types::*;
use vm::VM;
use vm::CompiledFunction;

use compiler::CompilerPass;
use compiler::backend::x86_64;
use compiler::backend::x86_64::CodeGenerator;
use compiler::backend::x86_64::ASMCodeGen;

use std::collections::HashMap;

pub struct InstructionSelection {
    name: &'static str,
    
    backend: Box<CodeGenerator>
}

impl InstructionSelection {
    pub fn new() -> InstructionSelection {
        InstructionSelection{
            name: "Instruction Selection (x64)",
            backend: Box::new(ASMCodeGen::new())
        }
    }
    
    // in this pass, we assume that
    // 1. all temporaries will use 64bit registers
    // 2. we do not need to backup/restore caller-saved registers
    // 3. we need to backup/restore all the callee-saved registers
    // if any of these assumption breaks, we will need to re-emit the code
    #[allow(unused_variables)]
    fn instruction_select(&mut self, node: &TreeNode, cur_func: &MuFunctionVersion, vm: &VM) {
        trace!("instsel on node {}", node);
        
        match node.v {
            TreeNode_::Instruction(ref inst) => {
                match inst.v {
                    Instruction_::Branch2{cond, ref true_dest, ref false_dest, true_prob} => {
                        // move this to trace generation
                        // assert here
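                        // pick the likelier successor as the fallthrough so the hot
                        // path runs straight through; the other successor gets an
                        // explicit jump. the jcc emitted below should branch exactly
                        // when cond == branch_if_true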
                        let (fallthrough_dest, branch_dest, branch_if_true) = {
                            if true_prob > 0.5f32 {
                                (true_dest, false_dest, false)
                            } else {
                                (false_dest, true_dest, true)
                            }
                        };
                        
                        let ops = inst.ops.read().unwrap();
                        
                        self.process_dest(&ops, fallthrough_dest, cur_func, vm);
                        self.process_dest(&ops, branch_dest, cur_func, vm);
                        
                        let branch_target = cur_func.content.as_ref().unwrap().get_block(branch_dest.target);
                        
                        let ref cond = ops[cond];
                        
                        if self.match_cmp_res(cond) {
                            trace!("emit cmp_eq-branch2");
                            match self.emit_cmp_res(cond, cur_func, vm) {
                                op::CmpOp::EQ => self.backend.emit_je(branch_target),
                                op::CmpOp::NE => self.backend.emit_jne(branch_target),
                                op::CmpOp::UGE => self.backend.emit_jae(branch_target),
                                op::CmpOp::UGT => self.backend.emit_ja(branch_target),
                                op::CmpOp::ULE => self.backend.emit_jbe(branch_target),
                                op::CmpOp::ULT => self.backend.emit_jb(branch_target),
                                op::CmpOp::SGE => self.backend.emit_jge(branch_target),
                                op::CmpOp::SGT => self.backend.emit_jg(branch_target),
                                op::CmpOp::SLE => self.backend.emit_jle(branch_target),
                                op::CmpOp::SLT => self.backend.emit_jl(branch_target),
                                _ => unimplemented!()
                            }
                        } else if self.match_ireg(cond) {
                            trace!("emit ireg-branch2");
                            
                            let cond_reg = self.emit_ireg(cond, cur_func, vm);
                            
                            // emit: cmp cond_reg 1
                            self.backend.emit_cmp_r64_imm32(&cond_reg, 1);
                            // emit: je/jne branch_dest, depending on whether the
                            // branch is taken when the condition is true or false
                            if branch_if_true {
                                self.backend.emit_je(branch_target);
                            } else {
                                self.backend.emit_jne(branch_target);
                            }
                        } else {
                            unimplemented!();
                        }
                    },
                    
                    Instruction_::Branch1(ref dest) => {
                        let ops = inst.ops.read().unwrap();
                        
                        self.process_dest(&ops, dest, cur_func, vm);
                        
                        let target = cur_func.content.as_ref().unwrap().get_block(dest.target);
                        
                        trace!("emit branch1");
                        // jmp
                        self.backend.emit_jmp(target);
                    },
                    
                    Instruction_::ExprCall{ref data, is_abort} => {
                        trace!("deal with pre-call convention");
                        
                        let ops = inst.ops.read().unwrap();
                        let rets = inst.value.as_ref().unwrap();
                        let ref func = ops[data.func];
                        let ref func_sig = match func.v {
                            TreeNode_::Value(ref pv) => {
                                let ty : &MuType = &pv.ty;
                                match ty.v {
                                    MuType_::FuncRef(ref sig)
                                    | MuType_::UFuncPtr(ref sig) => sig,
                                    _ => panic!("expected funcref/ptr type")
                                }
                            },
                            _ => panic!("expected funcref/ptr type")
                        };
                        
                        debug_assert!(func_sig.arg_tys.len() == data.args.len());
                        debug_assert!(func_sig.ret_tys.len() == rets.len());
                                                
                        let mut gpr_arg_count = 0;
                        // TODO: let mut fpr_arg_count = 0;
                        for arg_index in data.args.iter() {
                            let ref arg = ops[*arg_index];
                            trace!("arg {}", arg);
                            
                            if self.match_ireg(arg) {
                                let arg = self.emit_ireg(arg, cur_func, vm);
                                
                                if gpr_arg_count < x86_64::ARGUMENT_GPRs.len() {
                                    self.backend.emit_mov_r64_r64(&x86_64::ARGUMENT_GPRs[gpr_arg_count], &arg);
                                    gpr_arg_count += 1;
                                } else {
                                    // use stack to pass argument
                                    unimplemented!();
                                }
                            } else if self.match_iimm(arg) {
                                let arg = self.emit_get_iimm(arg);
                                
                                if gpr_arg_count < x86_64::ARGUMENT_GPRs.len() {
                                    self.backend.emit_mov_r64_imm32(&x86_64::ARGUMENT_GPRs[gpr_arg_count], arg);
                                    gpr_arg_count += 1;
                                } else {
                                    // use stack to pass argument
                                    unimplemented!();
                                }
                            } else {
                                unimplemented!();
                            }
                        }
                        
                        // check direct call or indirect
                        if self.match_funcref_const(func) {
                            let target_id = self.emit_get_funcref_const(func);
                            let funcs = vm.funcs().read().unwrap();
                            let target = funcs.get(&target_id).unwrap().read().unwrap();
                                                        
                            if vm.is_running() {
                                unimplemented!()
                            } else {
                                self.backend.emit_call_near_rel32(target.name().unwrap());
                            }
                        } else if self.match_ireg(func) {
                            let target = self.emit_ireg(func, cur_func, vm);
                            
                            self.backend.emit_call_near_r64(&target);
                        } else if self.match_mem(func) {
                            let target = self.emit_mem(func);
                            
                            self.backend.emit_call_near_mem64(&target);
                        } else {
                            unimplemented!();
                        }
                        
                        // deal with ret vals
                        let mut gpr_ret_count = 0;
                        // TODO: let mut fpr_ret_count = 0;
                        for val in rets {
                            if val.is_int_reg() {
                                if gpr_ret_count < x86_64::RETURN_GPRs.len() {
                                    self.backend.emit_mov_r64_r64(&val, &x86_64::RETURN_GPRs[gpr_ret_count]);
                                    gpr_ret_count += 1;
                                } else {
                                    // get return value by stack
                                    unimplemented!();
                                }
                            } else {
                                // floating point register
                                unimplemented!();
                            }
                        }
                    },
                    
                    Instruction_::Return(_) => {
                        self.emit_common_epilogue(inst, cur_func, vm);
                        
                        self.backend.emit_ret();
                    },
                    
                    Instruction_::BinOp(op, op1, op2) => {
                        let ops = inst.ops.read().unwrap();
                        
                        match op {
                            op::BinOp::Add => {
                                if self.match_ireg(&ops[op1]) && self.match_ireg(&ops[op2]) {
                                    trace!("emit add-ireg-ireg");
                                    
                                    let reg_op1 = self.emit_ireg(&ops[op1], cur_func, vm);
                                    let reg_op2 = self.emit_ireg(&ops[op2], cur_func, vm);
                                    let res_tmp = self.emit_get_result(node);
                                    
                                    // mov op1, res
                                    self.backend.emit_mov_r64_r64(&res_tmp, &reg_op1);
                                    // add op2 res
                                    self.backend.emit_add_r64_r64(&res_tmp, &reg_op2);
                                } else if self.match_ireg(&ops[op1]) && self.match_iimm(&ops[op2]) {
                                    trace!("emit add-ireg-imm");
                                    
                                    let reg_op1 = self.emit_ireg(&ops[op1], cur_func, vm);
                                    let reg_op2 = self.emit_get_iimm(&ops[op2]);
                                    let res_tmp = self.emit_get_result(node);
                                    
                                    // mov op1, res
                                    self.backend.emit_mov_r64_r64(&res_tmp, &reg_op1);
                                    // add op2, res
                                    self.backend.emit_add_r64_imm32(&res_tmp, reg_op2);
                                } else if self.match_iimm(&ops[op1]) && self.match_ireg(&ops[op2]) {
                                    trace!("emit add-imm-ireg");
                                    unimplemented!();
                                } else if self.match_ireg(&ops[op1]) && self.match_mem(&ops[op2]) {
                                    trace!("emit add-ireg-mem");
                                    
                                    let reg_op1 = self.emit_ireg(&ops[op1], cur_func, vm);
                                    let reg_op2 = self.emit_mem(&ops[op2]);
                                    let res_tmp = self.emit_get_result(node);
                                    
                                    // mov op1, res
                                    self.backend.emit_mov_r64_r64(&res_tmp, &reg_op1);
                                    // add op2 res
                                    self.backend.emit_add_r64_mem64(&res_tmp, &reg_op2);
                                } else if self.match_mem(&ops[op1]) && self.match_ireg(&ops[op2]) {
                                    trace!("emit add-mem-ireg");
                                    unimplemented!();
                                } else {
                                    unimplemented!()
                                }
                            },
                            op::BinOp::Sub => {
                                if self.match_ireg(&ops[op1]) && self.match_ireg(&ops[op2]) {
                                    trace!("emit sub-ireg-ireg");
                                    
                                    let reg_op1 = self.emit_ireg(&ops[op1], cur_func, vm);
                                    let reg_op2 = self.emit_ireg(&ops[op2], cur_func, vm);
                                    let res_tmp = self.emit_get_result(node);
                                    
                                    // mov op1, res
                                    self.backend.emit_mov_r64_r64(&res_tmp, &reg_op1);
                                    // sub op2, res
                                    self.backend.emit_sub_r64_r64(&res_tmp, &reg_op2);
                                } else if self.match_ireg(&ops[op1]) && self.match_iimm(&ops[op2]) {
                                    trace!("emit sub-ireg-imm");

                                    let reg_op1 = self.emit_ireg(&ops[op1], cur_func, vm);
                                    let imm_op2 = self.emit_get_iimm(&ops[op2]);
                                    let res_tmp = self.emit_get_result(node);
                                    
                                    // mov op1, res
                                    self.backend.emit_mov_r64_r64(&res_tmp, &reg_op1);
                                    // sub op2, res
                                    self.backend.emit_sub_r64_imm32(&res_tmp, imm_op2);
                                } else if self.match_iimm(&ops[op1]) && self.match_ireg(&ops[op2]) {
                                    trace!("emit sub-imm-ireg");
                                    unimplemented!();
                                } else if self.match_ireg(&ops[op1]) && self.match_mem(&ops[op2]) {
                                    trace!("emit sub-ireg-mem");
                                    
                                    let reg_op1 = self.emit_ireg(&ops[op1], cur_func, vm);
                                    let mem_op2 = self.emit_mem(&ops[op2]);
                                    let res_tmp = self.emit_get_result(node);
                                    
                                    // mov op1, res
                                    self.backend.emit_mov_r64_r64(&res_tmp, &reg_op1);
                                    // sub op2 res
                                    self.backend.emit_sub_r64_mem64(&res_tmp, &mem_op2);
                                } else if self.match_mem(&ops[op1]) && self.match_ireg(&ops[op2]) {
                                    trace!("emit add-mem-ireg");
                                    unimplemented!();
                                } else {
                                    unimplemented!()
                                }
                            },
                            op::BinOp::Mul => {
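                                // assuming emit_mul_r64 emits the unsigned x86-64 `mul`
                                // (rdx:rax = rax * operand), only the low 64 bits in rax
                                // are kept as the result below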
                                // mov op1 -> rax
                                let rax = x86_64::RAX.clone();
                                let op1 = &ops[op1];
                                if self.match_ireg(op1) {
                                    let reg_op1 = self.emit_ireg(op1, cur_func, vm);
                                    
                                    self.backend.emit_mov_r64_r64(&rax, &reg_op1);
                                } else if self.match_iimm(op1) {
                                    let imm_op1 = self.emit_get_iimm(op1);
                                    
                                    self.backend.emit_mov_r64_imm32(&rax, imm_op1);
                                } else if self.match_mem(op1) {
                                    let mem_op1 = self.emit_mem(op1);
                                    
                                    self.backend.emit_mov_r64_mem64(&rax, &mem_op1);
                                } else {
                                    unimplemented!();
                                }
                                
                                // mul op2 -> rax
                                let op2 = &ops[op2];
                                if self.match_ireg(op2) {
                                    let reg_op2 = self.emit_ireg(op2, cur_func, vm);
                                    
                                    self.backend.emit_mul_r64(&reg_op2);
                                } else if self.match_iimm(op2) {
                                    let imm_op2 = self.emit_get_iimm(op2);
                                    
                                    // put imm in a temporary
                                    // here we use result reg as temporary
                                    let res_tmp = self.emit_get_result(node);
                                    self.backend.emit_mov_r64_imm32(&res_tmp, imm_op2);
                                    
                                    self.backend.emit_mul_r64(&res_tmp);
                                } else if self.match_mem(op2) {
                                    let mem_op2 = self.emit_mem(op2);
                                    
                                    self.backend.emit_mul_mem64(&mem_op2);
                                } else {
                                    unimplemented!();
                                }
                                
                                // mov rax -> result
                                let res_tmp = self.emit_get_result(node);
                                self.backend.emit_mov_r64_r64(&res_tmp, &rax);
                            },
                            
                            _ => unimplemented!()
                        }
                    }
                    
                    // load on x64 generates mov inst (no matter what order is specified)
                    // https://www.cl.cam.ac.uk/~pes20/cpp/cpp0xmappings.html
                    Instruction_::Load{is_ptr, order, mem_loc} => {
                        let ops = inst.ops.read().unwrap();
                        let ref loc_op = ops[mem_loc];
                        
                        // check order
                        match order {
                            MemoryOrder::Relaxed 
                            | MemoryOrder::Consume 
                            | MemoryOrder::Acquire
                            | MemoryOrder::SeqCst => {},
                            _ => panic!("didnt expect order {:?} with store inst", order)
                        }                        

                        let resolved_loc = self.emit_get_mem(loc_op, vm);                        
                        let res_temp = self.emit_get_result(node);
                        
                        if self.match_ireg(node) {
                            // emit mov(GPR)
                            self.backend.emit_mov_r64_mem64(&res_temp, &resolved_loc);
                        } else {
                            // emit mov(FPR)
                            unimplemented!()
                        }
                    }
                    
                    Instruction_::Store{is_ptr, order, mem_loc, value} => {
                        let ops = inst.ops.read().unwrap();
                        let ref loc_op = ops[mem_loc];
                        let ref val_op = ops[value];
                        
                        let generate_plain_mov : bool = {
                            match order {
                                MemoryOrder::Relaxed | MemoryOrder::Release => true,
                                MemoryOrder::SeqCst => false,
                                _ => panic!("didnt expect order {:?} with store inst", order)
                            }
                        };
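                        // per the C/C++11 atomic mapping cited for Load above, a
                        // SeqCst store needs `xchg` (or `mov` plus `mfence`) rather
                        // than a plain mov; that path is still unimplemented below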
                        
                        let resolved_loc = self.emit_get_mem(loc_op, vm);
                        
                        if self.match_ireg(val_op) {
                            let val = self.emit_ireg(val_op, cur_func, vm);
                            if generate_plain_mov {
                                self.backend.emit_mov_mem64_r64(&resolved_loc, &val);
                            } else {
                                unimplemented!()
                            }
                        } else if self.match_iimm(val_op) {
                            let val = self.emit_get_iimm(val_op);
                            if generate_plain_mov {
                                self.backend.emit_mov_mem64_imm32(&resolved_loc, val);
                            } else {
                                unimplemented!()
                            }
                        } else {
                            // emit mov(FPR)
                            unimplemented!()
                        }
                    }
                    
                    Instruction_::ThreadExit => {
                        // emit a call to swap_back_to_native_stack(sp_loc: Address)
                    }
    
                    _ => unimplemented!()
                } // main switch
            },
            
            TreeNode_::Value(ref p) => {
                
            }
        }
    }
    
    #[allow(unused_variables)]
    fn process_dest(&mut self, ops: &Vec<P<TreeNode>>, dest: &Destination, cur_func: &MuFunctionVersion, vm: &VM) {
        for i in 0..dest.args.len() {
            let ref dest_arg = dest.args[i];
            match dest_arg {
                &DestArg::Normal(op_index) => {
                    let ref arg = ops[op_index];
//                    match arg.op {
//                        OpCode::RegI64 
//                        | OpCode::RegFP
//                        | OpCode::IntImmI64
//                        | OpCode::FPImm => {
//                            // do nothing
//                        },
//                        _ => {
//                            trace!("nested: compute arg for branch");
//                            // nested: compute arg
//                            self.instruction_select(arg, cur_func);
//                            
//                            self.emit_get_result(arg);
//                        }
//                    }
//                    
                    let ref target_args = cur_func.content.as_ref().unwrap().get_block(dest.target).content.as_ref().unwrap().args;
                    let ref target_arg = target_args[i];
                    
                    self.emit_general_move(&arg, target_arg, cur_func, vm);
                },
                &DestArg::Freshbound(_) => unimplemented!()
            }
        }
    }
    
    fn emit_common_prologue(&mut self, args: &Vec<P<Value>>) {
        let block_name = "prologue".to_string();
        self.backend.start_block(block_name.clone());
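        
        // emitted prologue, sketched (exact callee-saved order follows
        // x86_64::CALLEE_SAVED_GPRs):
        //   push rbp
        //   mov  rbp, rsp
        //   push <each callee-saved register except rbp>
        //   mov  <argument registers> -> <argument temporaries>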
        
        // no livein
        // liveout = entry block's args
        self.backend.set_block_livein(block_name.clone(), &vec![]);
        self.backend.set_block_liveout(block_name.clone(), args);
        
        // push rbp
        self.backend.emit_push_r64(&x86_64::RBP);
        // mov rsp -> rbp
        self.backend.emit_mov_r64_r64(&x86_64::RBP, &x86_64::RSP);
        
        // push all callee-saved registers
476 477 478 479 480 481
        for i in 0..x86_64::CALLEE_SAVED_GPRs.len() {
            let ref reg = x86_64::CALLEE_SAVED_GPRs[i];
            // not pushing rbp (as we have already done that)
            if reg.extract_ssa_id().unwrap() != x86_64::RBP.extract_ssa_id().unwrap() {
                self.backend.emit_push_r64(&reg);
            }
        }
        
        // unload arguments
        let mut gpr_arg_count = 0;
        // TODO: let mut fpr_arg_count = 0;
        for arg in args {
            if arg.is_int_reg() {
                if gpr_arg_count < x86_64::ARGUMENT_GPRs.len() {
                    self.backend.emit_mov_r64_r64(&arg, &x86_64::ARGUMENT_GPRs[gpr_arg_count]);
                    gpr_arg_count += 1;
                } else {
                    // unload from stack
                    unimplemented!();
                }
            } else if arg.is_fp_reg() {
                unimplemented!();
            } else {
                panic!("expect an arg value to be either int reg or fp reg");
            }
        }
        
        self.backend.end_block(block_name);
    }
    
    fn emit_common_epilogue(&mut self, ret_inst: &Instruction, cur_func: &MuFunctionVersion, vm: &VM) {
        // epilogue is not a block (it's a few instructions inserted before return)
        // FIXME: this may change in the future
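        // emitted epilogue, sketched (mirrors the prologue):
        //   mov  <return values> -> RETURN_GPRs (rax, rdx under System V)
        //   pop  <each callee-saved register except rbp, in reverse order>
        //   pop  rbp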
        
        // prepare return regs
        let ref ops = ret_inst.ops.read().unwrap();
        let ret_val_indices = match ret_inst.v {
            Instruction_::Return(ref vals) => vals,
            _ => panic!("expected ret inst")
        };
        
        let mut gpr_ret_count = 0;
        // TODO: let mut fpr_ret_count = 0;
        for i in ret_val_indices {
            let ref ret_val = ops[*i];
            if self.match_ireg(ret_val) {
                let reg_ret_val = self.emit_ireg(ret_val, cur_func, vm);
                
                self.backend.emit_mov_r64_r64(&x86_64::RETURN_GPRs[gpr_ret_count], &reg_ret_val);
                gpr_ret_count += 1;
            } else if self.match_iimm(ret_val) {
                let imm_ret_val = self.emit_get_iimm(ret_val);
                
                self.backend.emit_mov_r64_imm32(&x86_64::RETURN_GPRs[gpr_ret_count], imm_ret_val);
                gpr_ret_count += 1;
            } else {
                unimplemented!();
            }
        }        
        
        // pop all callee-saved registers - reverse order
        for i in (0..x86_64::CALLEE_SAVED_GPRs.len()).rev() {
            let ref reg = x86_64::CALLEE_SAVED_GPRs[i];
            if reg.extract_ssa_id().unwrap() != x86_64::RBP.extract_ssa_id().unwrap() {
                self.backend.emit_pop_r64(&reg);
            }
        }
        
        // pop rbp
        self.backend.emit_pop_r64(&x86_64::RBP);
    }
    
    fn match_cmp_res(&mut self, op: &P<TreeNode>) -> bool {
        match op.v {
            TreeNode_::Instruction(ref inst) => {
                match inst.v {
                    Instruction_::CmpOp(_, _, _) => true,
                    _ => false
                }
            }
            TreeNode_::Value(_) => false
        }
    }
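    
    // emit_cmp_res (below) both emits the cmp instruction and returns the CmpOp,
    // so a caller such as the Branch2 case above can pick the matching
    // conditional jump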
    
    fn emit_cmp_res(&mut self, cond: &P<TreeNode>, cur_func: &MuFunctionVersion, vm: &VM) -> op::CmpOp {
        match cond.v {
            TreeNode_::Instruction(ref inst) => {
                let ops = inst.ops.read().unwrap();
                
                match inst.v {
                    Instruction_::CmpOp(op, op1, op2) => {
                        let op1 = &ops[op1];
                        let op2 = &ops[op2];
                        
                        if op::is_int_cmp(op) {
                            if self.match_ireg(op1) && self.match_ireg(op2) {
                                let reg_op1 = self.emit_ireg(op1, cur_func, vm);
                                let reg_op2 = self.emit_ireg(op2, cur_func, vm);
                                
                                self.backend.emit_cmp_r64_r64(&reg_op1, &reg_op2);
                            } else if self.match_ireg(op1) && self.match_iimm(op2) {
                                let reg_op1 = self.emit_ireg(op1, cur_func, vm);
                                let iimm_op2 = self.emit_get_iimm(op2);
                                
                                self.backend.emit_cmp_r64_imm32(&reg_op1, iimm_op2);
                            } else {
                                unimplemented!()
                            }
                        } else {
                            unimplemented!()
                        }
                        
                        op
                    }
                    
                    _ => panic!("expect cmp res to emit")
                }
            }
            _ => panic!("expect cmp res to emit")
        }
    }
    
    fn match_ireg(&mut self, op: &TreeNode) -> bool {
        match op.v {
            TreeNode_::Instruction(ref inst) => {
                if inst.value.is_some() {
                    if inst.value.as_ref().unwrap().len() > 1 {
                        return false;
                    }
                    
                    let ref value = inst.value.as_ref().unwrap()[0];
                    
                    types::is_scalar(&value.ty)
                } else {
                    false
                }
            }
            
            TreeNode_::Value(ref pv) => {
                pv.is_int_reg()
            }
        }
    }
    
    fn emit_ireg(&mut self, op: &P<TreeNode>, cur_func: &MuFunctionVersion, vm: &VM) -> P<Value> {
        match op.v {
            TreeNode_::Instruction(_) => {
                self.instruction_select(op, cur_func, vm);
                
                self.emit_get_result(op)
            },
            TreeNode_::Value(ref pv) => {
                match pv.v {
                    Value_::Constant(_)
                    | Value_::Global(_)
                    | Value_::Memory(_) => panic!("expected ireg"),
                    Value_::SSAVar(_) => {
                        pv.clone()
                    },
                }
            }
        }
    }
    
    #[allow(unused_variables)]
    fn match_fpreg(&mut self, op: &P<TreeNode>) -> bool {
        unimplemented!()
    }
    
    fn match_iimm(&mut self, op: &P<TreeNode>) -> bool {
        match op.v {
            TreeNode_::Value(ref pv) if x86_64::is_valid_x86_imm(pv) => true,
            _ => false
        }
    }
    
    fn emit_get_iimm(&mut self, op: &P<TreeNode>) -> u32 {
        match op.v {
            TreeNode_::Value(ref pv) => {
                match pv.v {
                    Value_::Constant(Constant::Int(val)) => {
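                        // the `as u32` truncation relies on match_iimm /
                        // is_valid_x86_imm having already checked that the
                        // constant fits an x86-64 imm32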
                        val as u32
                    },
                    _ => panic!("expected iimm")
                }
            },
            _ => panic!("expected iimm")
        }
    }
    
    fn emit_get_mem(&mut self, op: &P<TreeNode>, vm: &VM) -> P<Value> {
        match op.v {
            TreeNode_::Value(ref pv) => {
                match pv.v {
                    Value_::SSAVar(_) => P(Value{
                        hdr: MuEntityHeader::unnamed(vm.next_id()),
                        ty: types::get_referent_ty(& pv.ty).unwrap(),
                        v: Value_::Memory(MemoryLocation::Address{
                            base: pv.clone(),
                            offset: None,
                            index: None,
                            scale: None
                        })
                    }),
                    Value_::Global(_) => {
                        if vm.is_running() {
                            // get address from vm
                            unimplemented!()
                        } else {
                            // symbolic
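                            // e.g. this would later be emitted as a RIP-relative
                            // operand such as `label(%rip)` (a sketch; the exact
                            // syntax is the code generator's concern)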
                            P(Value{
                                hdr: MuEntityHeader::unnamed(vm.next_id()),
                                ty: types::get_referent_ty(&pv.ty).unwrap(),
                                v: Value_::Memory(MemoryLocation::Symbolic{
                                    base: Some(x86_64::RIP.clone()),
                                    label: pv.name().unwrap()
                                })
                            })
                        }
                    },
                    Value_::Memory(_) => pv.clone(),
                    Value_::Constant(_) => unimplemented!()
                }
            }
            TreeNode_::Instruction(_) => unimplemented!()
        }
    }
    
    fn match_funcref_const(&mut self, op: &P<TreeNode>) -> bool {
        match op.v {
            TreeNode_::Value(ref pv) => {
                match pv.v {
                    Value_::Constant(Constant::FuncRef(_)) => true,
                    Value_::Constant(Constant::UFuncRef(_)) => true,
                    _ => false
                }
            },
            _ => false 
        }
    }
    
    fn emit_get_funcref_const(&mut self, op: &P<TreeNode>) -> MuID {
        match op.v {
            TreeNode_::Value(ref pv) => {
                match pv.v {
                    Value_::Constant(Constant::FuncRef(id))
                    | Value_::Constant(Constant::UFuncRef(id)) => id,
                    _ => panic!("expected a (u)funcref const")
                }
            },
            _ => panic!("expected a (u)funcref const")
        }
    }
    
    #[allow(unused_variables)]
    fn match_mem(&mut self, op: &P<TreeNode>) -> bool {
        unimplemented!()
    }
    
    #[allow(unused_variables)]
    fn emit_mem(&mut self, op: &P<TreeNode>) -> P<Value> {
        unimplemented!()
    }
    
    fn emit_get_result(&mut self, node: &TreeNode) -> P<Value> {
        match node.v {
            TreeNode_::Instruction(ref inst) => {
                if inst.value.is_some() {
                    if inst.value.as_ref().unwrap().len() > 1 {
                        panic!("expected ONE result from the node {}", node);
                    }
                    
                    let ref value = inst.value.as_ref().unwrap()[0];
                    
                    value.clone()
                } else {
                    panic!("expected result from the node {}", node);
                }
            }
            
            TreeNode_::Value(ref pv) => {
                pv.clone()
            }
        }
    }
    
    fn emit_general_move(&mut self, src: &P<TreeNode>, dest: &P<Value>, cur_func: &MuFunctionVersion, vm: &VM) {
        let ref dst_ty = dest.ty;
        
        if !types::is_fp(dst_ty) && types::is_scalar(dst_ty) {
            if self.match_ireg(src) {
                let src_reg = self.emit_ireg(src, cur_func, vm);
                self.backend.emit_mov_r64_r64(dest, &src_reg);
            } else if self.match_iimm(src) {
                let src_imm = self.emit_get_iimm(src);
                self.backend.emit_mov_r64_imm32(dest, src_imm);
            } else {
                panic!("expected an int type op");
            }
        } else if types::is_fp(dst_ty) && types::is_scalar(dst_ty) {
            unimplemented!()
        } else {
            panic!("unexpected type for move");
        } 
    }
}

impl CompilerPass for InstructionSelection {
    fn name(&self) -> &'static str {
        self.name
    }

    #[allow(unused_variables)]
    fn start_function(&mut self, vm: &VM, func_ver: &mut MuFunctionVersion) {
        debug!("{}", self.name());
        
        let funcs = vm.funcs().read().unwrap();
        let func = funcs.get(&func_ver.func_id).unwrap().read().unwrap();
        self.backend.start_code(func.name().unwrap());
        
        // prologue (get arguments from entry block first)        
        let entry_block = func_ver.content.as_ref().unwrap().get_entry_block();
        let ref args = entry_block.content.as_ref().unwrap().args;
        self.emit_common_prologue(args);
    }

    #[allow(unused_variables)]
    fn visit_function(&mut self, vm: &VM, func: &mut MuFunctionVersion) {
        for block_id in func.block_trace.as_ref().unwrap() {
            let block = func.content.as_ref().unwrap().get_block(*block_id);
            let block_label = block.name().unwrap();
            
            self.backend.start_block(block_label.clone());

            let block_content = block.content.as_ref().unwrap();
            
            // live in is args of the block
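            // e.g. a block `%b(<@i64> %x, <@i64> %y)` (illustrative Mu IR syntax)
            // starts with livein {%x, %y}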
            self.backend.set_block_livein(block_label.clone(), &block_content.args);
            
            // live out is the union of all branch args of this block
            let live_out = block_content.get_out_arguments();
            self.backend.set_block_liveout(block_label.clone(), &live_out);

            for inst in block_content.body.iter() {
                self.instruction_select(&inst, func, vm);
            }
            
            self.backend.end_block(block_label);
        }
    }
    
    #[allow(unused_variables)]
    fn finish_function(&mut self, vm: &VM, func: &mut MuFunctionVersion) {
        self.backend.print_cur_code();
        
        let mc = self.backend.finish_code();
        let compiled_func = CompiledFunction {
            func_id: func.func_id,
            func_ver_id: func.id(),
            temps: HashMap::new(),
            mc: mc
        };
        
        vm.add_compiled_func(compiled_func);
    }
}