inst_sel.rs

use ast::ir::*;
use ast::ptr::*;
use ast::inst::Instruction;
use ast::inst::Destination;
use ast::inst::DestArg;
use ast::inst::Instruction_;
use ast::op;
use ast::types;
use ast::types::MuType_;
use vm::context::VMContext;
use vm::machine_code::CompiledFunction;

use compiler::CompilerPass;
use compiler::backend::x86_64;
use compiler::backend::x86_64::CodeGenerator;
use compiler::backend::x86_64::ASMCodeGen;

pub struct InstructionSelection {
    name: &'static str,
    
    backend: Box<CodeGenerator>
}

impl <'a> InstructionSelection {
    pub fn new() -> InstructionSelection {
        InstructionSelection{
            name: "Instruction Selection (x64)",
            backend: Box::new(ASMCodeGen::new())
        }
    }
    
    // in this pass, we assume that
    // 1. all temporaries will use 64bit registers
    // 2. we do not need to backup/restore caller-saved registers
    // 3. we need to backup/restore all the callee-saved registers
    // if any of these assumptions breaks, we will need to re-emit the code
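    // e.g. (sketch) an Add whose operands match (ireg, iimm) lowers to:
    //     mov reg_op1 -> res_tmp
    //     add imm_op2 -> res_tmp
    // i.e. the result temporary is initialised from op1 and then updated
    // in place, which is the pattern used by the BinOp cases below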
    #[allow(unused_variables)]
    fn instruction_select(&mut self, node: &'a P<TreeNode>, cur_func: &MuFunction) {
        trace!("instsel on node {}", node);
        
        match node.v {
            TreeNode_::Instruction(ref inst) => {
                match inst.v {
                    Instruction_::Branch2{cond, ref true_dest, ref false_dest, true_prob} => {
                        // move this to trace generation
                        // assert here
                        let (fallthrough_dest, branch_dest, branch_if_true) = {
                            if true_prob > 0.5f32 {
                                (true_dest, false_dest, false)
                            } else {
                                (false_dest, true_dest, true)
                            }
                        };
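                        // the likelier destination becomes the fall-through
                        // block and we branch to the colder one; note that
                        // branch_if_true (whether the branch should be taken
                        // on a true condition) is not yet consulted when the
                        // conditional jump is chosen below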
                        
                        let ops = inst.ops.borrow();
                        
                        self.process_dest(&ops, fallthrough_dest, cur_func);
                        self.process_dest(&ops, branch_dest, cur_func);
    
                        let ref cond = ops[cond];
                        
                        if self.match_cmp_res(cond) {
                            trace!("emit cmp-branch2");
                            match self.emit_cmp_res(cond, cur_func) {
                                op::CmpOp::EQ => self.backend.emit_je(branch_dest),
                                op::CmpOp::NE => self.backend.emit_jne(branch_dest),
                                op::CmpOp::UGE => self.backend.emit_jae(branch_dest),
                                op::CmpOp::UGT => self.backend.emit_ja(branch_dest),
                                op::CmpOp::ULE => self.backend.emit_jbe(branch_dest),
                                op::CmpOp::ULT => self.backend.emit_jb(branch_dest),
                                op::CmpOp::SGE => self.backend.emit_jge(branch_dest),
                                op::CmpOp::SGT => self.backend.emit_jg(branch_dest),
                                op::CmpOp::SLE => self.backend.emit_jle(branch_dest),
                                op::CmpOp::SLT => self.backend.emit_jl(branch_dest),
                                _ => unimplemented!()
                            }
                        } else if self.match_ireg(cond) {
                            trace!("emit ireg-branch2");
                            
                            let cond_reg = self.emit_ireg(cond, cur_func);
                            
                            // emit: cmp cond_reg 1
                            self.backend.emit_cmp_r64_imm32(&cond_reg, 1);
                            // emit: je #branch_dest
                            self.backend.emit_je(branch_dest);                            
                        } else {
                            unimplemented!();
                        }
                    },
                    
                    Instruction_::Branch1(ref dest) => {
                        let ops = inst.ops.borrow();
                                            
                        self.process_dest(&ops, dest, cur_func);
                        
                        trace!("emit branch1");
                        // jmp
                        self.backend.emit_jmp(dest);
                    },
                    
                    Instruction_::ExprCall{ref data, is_abort} => {
                        trace!("deal with pre-call convention");
                        
                        let ops = inst.ops.borrow();
                        let rets = inst.value.as_ref().unwrap();
                        let ref func = ops[data.func];
                        let ref func_sig = match func.v {
                            TreeNode_::Value(ref pv) => {
                                let ty : &MuType_ = &pv.ty;
                                match ty {
                                    &MuType_::FuncRef(ref sig)
                                    | &MuType_::UFuncPtr(ref sig) => sig,
                                    _ => panic!("expected funcref/ptr type")
                                }
                            },
                            _ => panic!("expected funcref/ptr type")
                        };
                        
                        debug_assert!(func_sig.arg_tys.len() == data.args.len());
                        debug_assert!(func_sig.ret_tys.len() == rets.len());
                                                
                        let mut gpr_arg_count = 0;
                        // TODO: let mut fpr_arg_count = 0;
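                        // pass arguments following the platform calling
                        // convention: the first ARGUMENT_GPRs.len() integer
                        // arguments go in registers (on System V x86-64 these
                        // would be RDI, RSI, RDX, RCX, R8, R9), the rest on
                        // the stack (unimplemented below)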
                        for arg_index in data.args.iter() {
                            let ref arg = ops[*arg_index];
                            trace!("arg {}", arg);
                            
                            if self.match_ireg(arg) {
                                let arg = self.emit_ireg(arg, cur_func);
                                
                                if gpr_arg_count < x86_64::ARGUMENT_GPRs.len() {
                                    self.backend.emit_mov_r64_r64(&x86_64::ARGUMENT_GPRs[gpr_arg_count], &arg);
                                    gpr_arg_count += 1;
                                } else {
                                    // use stack to pass argument
                                    unimplemented!();
                                }
                            } else if self.match_iimm(arg) {
                                let arg = self.emit_get_iimm(arg);
                                
                                if gpr_arg_count < x86_64::ARGUMENT_GPRs.len() {
                                    self.backend.emit_mov_r64_imm32(&x86_64::ARGUMENT_GPRs[gpr_arg_count], arg);
                                    gpr_arg_count += 1;
                                } else {
                                    // use stack to pass argument
                                    unimplemented!();
                                }
                            } else {
                                unimplemented!();
                            }
                        }
                        
                        // check direct call or indirect
                        if self.match_funcref_const(func) {
                            let target = self.emit_get_funcref_const(func);
                            
                            self.backend.emit_call_near_rel32(target);
                        } else if self.match_ireg(func) {
                            let target = self.emit_ireg(func, cur_func);
                            
                            self.backend.emit_call_near_r64(&target);
                        } else if self.match_mem(func) {
                            let target = self.emit_mem(func);
                            
                            self.backend.emit_call_near_mem64(&target);
                        } else {
                            unimplemented!();
                        }
                        
                        // deal with ret vals
                        let mut gpr_ret_count = 0;
                        // TODO: let mut fpr_ret_count = 0;
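                        // move values out of the return registers (on System
                        // V x86-64, RETURN_GPRs would be RAX then RDX) into
                        // the instruction's result temporaries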
                        for val in rets {
                            if val.is_int_reg() {
                                if gpr_ret_count < x86_64::RETURN_GPRs.len() {
                                    self.backend.emit_mov_r64_r64(&val, &x86_64::RETURN_GPRs[gpr_ret_count]);
                                    gpr_ret_count += 1;
                                } else {
                                    // get return value by stack
                                    unimplemented!();
                                }
                            } else {
                                // floating point register
                                unimplemented!();
                            }
                        }
                    },
                    
                    Instruction_::Return(_) => {
                        self.emit_common_epilogue(inst, cur_func);
                        
                        self.backend.emit_ret();
                    },
                    
                    Instruction_::BinOp(op, op1, op2) => {
                        let ops = inst.ops.borrow();
                        
                        match op {
                            op::BinOp::Add => {
                                if self.match_ireg(&ops[op1]) && self.match_ireg(&ops[op2]) {
                                    trace!("emit add-ireg-ireg");
                                    
                                    let reg_op1 = self.emit_ireg(&ops[op1], cur_func);
                                    let reg_op2 = self.emit_ireg(&ops[op2], cur_func);
                                    let res_tmp = self.emit_get_result(node);
                                    
                                    // mov op1, res
                                    self.backend.emit_mov_r64_r64(&res_tmp, &reg_op1);
                                    // add op2 res
                                    self.backend.emit_add_r64_r64(&res_tmp, &reg_op2);
                                } else if self.match_ireg(&ops[op1]) && self.match_iimm(&ops[op2]) {
                                    trace!("emit add-ireg-imm");
                                    
                                    let reg_op1 = self.emit_ireg(&ops[op1], cur_func);
                                    let reg_op2 = self.emit_get_iimm(&ops[op2]);
                                    let res_tmp = self.emit_get_result(node);
                                    
                                    // mov op1, res
                                    self.backend.emit_mov_r64_r64(&res_tmp, &reg_op1);
                                    // add op2, res
                                    self.backend.emit_add_r64_imm32(&res_tmp, reg_op2);
                                } else if self.match_iimm(&ops[op1]) && self.match_ireg(&ops[op2]) {
                                    trace!("emit add-imm-ireg");
                                    unimplemented!();
                                } else if self.match_ireg(&ops[op1]) && self.match_mem(&ops[op2]) {
                                    trace!("emit add-ireg-mem");
                                    
                                    let reg_op1 = self.emit_ireg(&ops[op1], cur_func);
                                    let reg_op2 = self.emit_mem(&ops[op2]);
                                    let res_tmp = self.emit_get_result(node);
                                    
                                    // mov op1, res
                                    self.backend.emit_mov_r64_r64(&res_tmp, &reg_op1);
                                    // add op2 res
                                    self.backend.emit_add_r64_mem64(&res_tmp, &reg_op2);
                                } else if self.match_mem(&ops[op1]) && self.match_ireg(&ops[op2]) {
                                    trace!("emit add-mem-ireg");
                                    unimplemented!();
                                } else {
                                    unimplemented!()
                                }
                            },
                            op::BinOp::Sub => {
                                if self.match_ireg(&ops[op1]) && self.match_ireg(&ops[op2]) {
                                    trace!("emit sub-ireg-ireg");
                                    
                                    let reg_op1 = self.emit_ireg(&ops[op1], cur_func);
                                    let reg_op2 = self.emit_ireg(&ops[op2], cur_func);
                                    let res_tmp = self.emit_get_result(node);
                                    
                                    // mov op1, res
                                    self.backend.emit_mov_r64_r64(&res_tmp, &reg_op1);
                                    // sub op2, res
                                    self.backend.emit_sub_r64_r64(&res_tmp, &reg_op2);
                                } else if self.match_ireg(&ops[op1]) && self.match_iimm(&ops[op2]) {
                                    trace!("emit sub-ireg-imm");

                                    let reg_op1 = self.emit_ireg(&ops[op1], cur_func);
                                    let imm_op2 = self.emit_get_iimm(&ops[op2]);
                                    let res_tmp = self.emit_get_result(node);
                                    
                                    // mov op1, res
                                    self.backend.emit_mov_r64_r64(&res_tmp, &reg_op1);
                                    // sub op2, res
                                    self.backend.emit_sub_r64_imm32(&res_tmp, imm_op2);
                                } else if self.match_iimm(&ops[op1]) && self.match_ireg(&ops[op2]) {
                                    trace!("emit sub-imm-ireg");
                                    unimplemented!();
                                } else if self.match_ireg(&ops[op1]) && self.match_mem(&ops[op2]) {
                                    trace!("emit sub-ireg-mem");
                                    
                                    let reg_op1 = self.emit_ireg(&ops[op1], cur_func);
                                    let mem_op2 = self.emit_mem(&ops[op2]);
                                    let res_tmp = self.emit_get_result(node);
                                    
                                    // mov op1, res
                                    self.backend.emit_mov_r64_r64(&res_tmp, &reg_op1);
                                    // sub op2 res
                                    self.backend.emit_sub_r64_mem64(&res_tmp, &mem_op2);
                                } else if self.match_mem(&ops[op1]) && self.match_ireg(&ops[op2]) {
                                    trace!("emit sub-mem-ireg");
                                    unimplemented!();
                                } else {
                                    unimplemented!()
                                }
                            },
                            op::BinOp::Mul => {
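                                // x86-64 mul is tied to RAX: it multiplies
                                // RAX by its operand, leaving the low half of
                                // the product in RAX (the high half goes to
                                // RDX and is ignored here), hence the staging
                                // through RAX below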
                                // mov op1 -> rax
                                let rax = x86_64::RAX.clone();
                                let op1 = &ops[op1];
                                if self.match_ireg(op1) {
                                    let reg_op1 = self.emit_ireg(op1, cur_func);
                                    
                                    self.backend.emit_mov_r64_r64(&rax, &reg_op1);
                                } else if self.match_iimm(op1) {
                                    let imm_op1 = self.emit_get_iimm(op1);
                                    
                                    self.backend.emit_mov_r64_imm32(&rax, imm_op1);
                                } else if self.match_mem(op1) {
                                    let mem_op1 = self.emit_mem(op1);
                                    
                                    self.backend.emit_mov_r64_mem64(&rax, &mem_op1);
                                } else {
                                    unimplemented!();
                                }
                                
                                // mul op2 -> rax
                                let op2 = &ops[op2];
                                if self.match_ireg(op2) {
                                    let reg_op2 = self.emit_ireg(op2, cur_func);
                                    
                                    self.backend.emit_mul_r64(&reg_op2);
                                } else if self.match_iimm(op2) {
                                    let imm_op2 = self.emit_get_iimm(op2);
                                    
                                    // put imm in a temporary
                                    // here we use result reg as temporary
                                    let res_tmp = self.emit_get_result(node);
                                    self.backend.emit_mov_r64_imm32(&res_tmp, imm_op2);
                                    
                                    self.backend.emit_mul_r64(&res_tmp);
                                } else if self.match_mem(op2) {
                                    let mem_op2 = self.emit_mem(op2);
                                    
                                    self.backend.emit_mul_mem64(&mem_op2);
                                } else {
                                    unimplemented!();
                                }
                                
                                // mov rax -> result
                                let res_tmp = self.emit_get_result(node);
                                self.backend.emit_mov_r64_r64(&res_tmp, &rax);
                            },
                            
                            _ => unimplemented!()
                        }
                    }
    
                    _ => unimplemented!()
                } // main switch
            },
            
            TreeNode_::Value(ref p) => {

            }
        }
    }
    
    #[allow(unused_variables)]
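    // moves each branch argument into the corresponding parameter of the
    // target block (Mu blocks take arguments in place of phi nodes)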
    fn process_dest(&mut self, ops: &Vec<P<TreeNode>>, dest: &Destination, cur_func: &MuFunction) {
        for i in 0..dest.args.len() {
            let ref dest_arg = dest.args[i];
            match dest_arg {
                &DestArg::Normal(op_index) => {
                    let ref arg = ops[op_index];
//                    match arg.op {
//                        OpCode::RegI64 
//                        | OpCode::RegFP
//                        | OpCode::IntImmI64
//                        | OpCode::FPImm => {
//                            // do nothing
//                        },
//                        _ => {
//                            trace!("nested: compute arg for branch");
//                            // nested: compute arg
//                            self.instruction_select(arg, cur_func);
//                            
//                            self.emit_get_result(arg);
//                        }
//                    }
//                    
                    let ref target_args = cur_func.content.as_ref().unwrap().get_block(dest.target).content.as_ref().unwrap().args;
                    let ref target_arg = target_args[i];
                    
                    self.emit_general_move(&arg, target_arg, cur_func);
                },
                &DestArg::Freshbound(_) => unimplemented!()
            }
        }
    }
    
    fn emit_common_prologue(&mut self, args: &Vec<P<Value>>) {
        self.backend.start_block("prologue");
        
        // push rbp
        self.backend.emit_push_r64(&x86_64::RBP);
        // mov rsp -> rbp
        self.backend.emit_mov_r64_r64(&x86_64::RBP, &x86_64::RSP);
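        // the frame now looks like:
        //     [ return address ]
        //     [ saved rbp      ] <- rbp = rsp
        // callee-saved registers are pushed next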
        
        // push all callee-saved registers
        for i in 0..x86_64::CALLEE_SAVED_GPRs.len() {
            let ref reg = x86_64::CALLEE_SAVED_GPRs[i];
            // not pushing rbp (as we have already done that)
            if reg.extract_ssa_id().unwrap() != x86_64::RBP.extract_ssa_id().unwrap() {
                self.backend.emit_push_r64(&reg);
            }
        }
        
        // unload arguments
        let mut gpr_arg_count = 0;
        // TODO: let mut fpr_arg_count = 0;
        for arg in args {
            if arg.is_int_reg() {
                if gpr_arg_count < x86_64::ARGUMENT_GPRs.len() {
                    self.backend.emit_mov_r64_r64(&arg, &x86_64::ARGUMENT_GPRs[gpr_arg_count]);
                    gpr_arg_count += 1;
                } else {
                    // unload from stack
                    unimplemented!();
                }
            } else if arg.is_fp_reg() {
                unimplemented!();
            } else {
                panic!("expect an arg value to be either int reg or fp reg");
            }
        }
    }
    
    fn emit_common_epilogue(&mut self, ret_inst: &Instruction, cur_func: &MuFunction) {
        self.backend.start_block("epilogue");
        
        // prepare return regs
        let ref ops = ret_inst.ops.borrow();
        let ret_val_indices = match ret_inst.v {
            Instruction_::Return(ref vals) => vals,
            _ => panic!("expected ret inst")
        };
        
        let mut gpr_ret_count = 0;
        // TODO: let mut fpr_ret_count = 0;
        for i in ret_val_indices {
            let ref ret_val = ops[*i];
            if self.match_ireg(ret_val) {
                let reg_ret_val = self.emit_ireg(ret_val, cur_func);
                
                self.backend.emit_mov_r64_r64(&x86_64::RETURN_GPRs[gpr_ret_count], &reg_ret_val);
                gpr_ret_count += 1;
            } else if self.match_iimm(ret_val) {
                let imm_ret_val = self.emit_get_iimm(ret_val);
                
                self.backend.emit_mov_r64_imm32(&x86_64::RETURN_GPRs[gpr_ret_count], imm_ret_val);
                gpr_ret_count += 1;
            } else {
                unimplemented!();
            }
        }        
        
        // pop all callee-saved registers - reverse order
        for i in (0..x86_64::CALLEE_SAVED_GPRs.len()).rev() {
            let ref reg = x86_64::CALLEE_SAVED_GPRs[i];
            if reg.extract_ssa_id().unwrap() != x86_64::RBP.extract_ssa_id().unwrap() {
                self.backend.emit_pop_r64(&reg);
            }
        }
        
        // pop rbp
        self.backend.emit_pop_r64(&x86_64::RBP);
    }
    
    fn match_cmp_res(&mut self, op: &P<TreeNode>) -> bool {
        match op.v {
            TreeNode_::Instruction(ref inst) => {
                match inst.v {
                    Instruction_::CmpOp(_, _, _) => true,
                    _ => false
                }
            }
            TreeNode_::Value(_) => false
        }
    }
    
    fn emit_cmp_res(&mut self, cond: &P<TreeNode>, cur_func: &MuFunction) -> op::CmpOp {
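        // emits a compare that sets the machine flags for cond, and returns
        // the compare op so the caller can pick the matching conditional
        // jump (see the jcc selection for Branch2 above)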
        match cond.v {
            TreeNode_::Instruction(ref inst) => {
                let ops = inst.ops.borrow();                
                
                match inst.v {
                    Instruction_::CmpOp(op, op1, op2) => {
                        let op1 = &ops[op1];
                        let op2 = &ops[op2];
                        
                        if op::is_int_cmp(op) {                        
                            if self.match_ireg(op1) && self.match_ireg(op2) {
                                let reg_op1 = self.emit_ireg(op1, cur_func);
                                let reg_op2 = self.emit_ireg(op2, cur_func);
                                
                                self.backend.emit_cmp_r64_r64(&reg_op1, &reg_op2);
                            } else if self.match_ireg(op1) && self.match_iimm(op2) {
                                let reg_op1 = self.emit_ireg(op1, cur_func);
                                let iimm_op2 = self.emit_get_iimm(op2);
                                
                                self.backend.emit_cmp_r64_imm32(&reg_op1, iimm_op2);
                            } else {
                                unimplemented!()
                            }
                        } else {
                            unimplemented!()
                        }
                        
                        op
                    }
                    
                    _ => panic!("expect cmp res to emit")
                }
            }
            _ => panic!("expect cmp res to emit")
        }
    }    
    
    fn match_ireg(&mut self, op: &P<TreeNode>) -> bool {
        match op.v {
            TreeNode_::Instruction(ref inst) => {
                if inst.value.is_some() {
                    if inst.value.as_ref().unwrap().len() > 1 {
                        return false;
                    }
                    
                    let ref value = inst.value.as_ref().unwrap()[0];
                    
                    types::is_scalar(&value.ty)
                } else {
                    false
                }
            }
            
            TreeNode_::Value(ref pv) => {
                pv.is_int_reg()
            }
        }
    }
    
    fn emit_ireg(&mut self, op: &P<TreeNode>, cur_func: &MuFunction) -> P<Value> {
        match op.v {
            TreeNode_::Instruction(_) => {
                self.instruction_select(op, cur_func);
                
                self.emit_get_result(op)
            },
            TreeNode_::Value(ref pv) => {
                match pv.v {
                    Value_::Constant(_) => panic!("expected ireg"),
                    Value_::SSAVar(_) => {
                        pv.clone()
                    }
                }
            }
        }
    }
    
    #[allow(unused_variables)]
    fn match_fpreg(&mut self, op: &P<TreeNode>) -> bool {
        unimplemented!()
    }
    
    fn match_iimm(&mut self, op: &P<TreeNode>) -> bool {
        match op.v {
            TreeNode_::Value(ref pv) if x86_64::is_valid_x86_imm(pv) => true,
            _ => false
        }
    }
    
    fn emit_get_iimm(&mut self, op: &P<TreeNode>) -> u32 {
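        // x86-64 ALU instructions take at most a 32-bit immediate (sign-
        // extended to 64 bits at run time), which is why match_iimm guards
        // with is_valid_x86_imm and the value is truncated to u32 here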
        match op.v {
            TreeNode_::Value(ref pv) => {
                match pv.v {
                    Value_::Constant(Constant::Int(val)) => {
                        val as u32
                    },
                    _ => panic!("expected iimm")
                }
            },
            _ => panic!("expected iimm")
        }
    }
    
    fn match_funcref_const(&mut self, op: &P<TreeNode>) -> bool {
        match op.v {
            TreeNode_::Value(ref pv) => {
                match pv.v {
                    Value_::Constant(Constant::FuncRef(_)) => true,
                    Value_::Constant(Constant::UFuncRef(_)) => true,
                    _ => false
                }
            },
            _ => false 
        }
    }
    
    fn emit_get_funcref_const(&mut self, op: &P<TreeNode>) -> MuTag {
        match op.v {
            TreeNode_::Value(ref pv) => {
                match pv.v {
                    Value_::Constant(Constant::FuncRef(tag))
                    | Value_::Constant(Constant::UFuncRef(tag)) => tag,
                    _ => panic!("expected a (u)funcref const")
                }
            },
            _ => panic!("expected a (u)funcref const")
        }
    }
    
    #[allow(unused_variables)]
    fn match_mem(&mut self, op: &P<TreeNode>) -> bool {
        unimplemented!()
    }
    
    #[allow(unused_variables)]
    fn emit_mem(&mut self, op: &P<TreeNode>) -> P<Value> {
        unimplemented!()
    }
    
    fn emit_get_result(&mut self, node: &P<TreeNode>) -> P<Value> {
        match node.v {
            TreeNode_::Instruction(ref inst) => {
                if inst.value.is_some() {
                    if inst.value.as_ref().unwrap().len() > 1 {
                        panic!("expected ONE result from the node {}", node);
                    }
                    
                    let ref value = inst.value.as_ref().unwrap()[0];
                    
                    value.clone()
                } else {
                    panic!("expected result from the node {}", node);
                }
            }
            
            TreeNode_::Value(ref pv) => {
                pv.clone()
            }
        }
    }
    
    fn emit_general_move(&mut self, src: &P<TreeNode>, dest: &P<Value>, cur_func: &MuFunction) {
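        // moves src into dest, picking a register-register or
        // register-immediate encoding; only scalar integer destinations are
        // handled so far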
        let ref dst_ty = dest.ty;
        
        if !types::is_fp(dst_ty) && types::is_scalar(dst_ty) {
            if self.match_ireg(src) {
                let src_reg = self.emit_ireg(src, cur_func);
                self.backend.emit_mov_r64_r64(dest, &src_reg);
            } else if self.match_iimm(src) {
                let src_imm = self.emit_get_iimm(src);
                self.backend.emit_mov_r64_imm32(dest, src_imm);
            } else {
                panic!("expected an int type op");
            }
        } else if types::is_fp(dst_ty) && types::is_scalar(dst_ty) {
            unimplemented!()
        } else {
            panic!("unexpected type for move");
        } 
    }
}

impl CompilerPass for InstructionSelection {
    fn name(&self) -> &'static str {
        self.name
    }

    #[allow(unused_variables)]
    fn start_function(&mut self, vm_context: &VMContext, func: &mut MuFunction) {
        debug!("{}", self.name());
        
        self.backend.start_code(func.fn_name);
        
        // prologue (get arguments from entry block first)        
        let entry_block = func.content.as_ref().unwrap().get_entry_block();
        let ref args = entry_block.content.as_ref().unwrap().args;
        self.emit_common_prologue(args);
    }

    #[allow(unused_variables)]
    fn visit_function(&mut self, vm_context: &VMContext, func: &mut MuFunction) {
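        // visit blocks in block_trace order (presumably laid down by an
        // earlier trace-scheduling pass), so the fall-through emitted for
        // Branch2 lines up with the next block in the trace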
        for block_label in func.block_trace.as_ref().unwrap() {
            let block = func.content.as_ref().unwrap().get_block(block_label);
            
            self.backend.start_block(block.label);

            let block_content = block.content.as_ref().unwrap();
            
            // live in is args of the block
            self.backend.set_block_livein(block.label, &block_content.args);
            
            // live out is the union of all branch args of this block
            let live_out = block_content.get_out_arguments();
            self.backend.set_block_liveout(block.label, &live_out);

            for inst in block_content.body.iter() {
                self.instruction_select(inst, func);
            }
        }
    }
    
    #[allow(unused_variables)]
    fn finish_function(&mut self, vm_context: &VMContext, func: &mut MuFunction) {
        self.backend.print_cur_code();
        
        let mc = self.backend.finish_code();
        let compiled_func = CompiledFunction {
            fn_name: func.fn_name,
            mc: mc
        };
        
        vm_context.add_compiled_func(compiled_func);
    }
}