To protect your data, the CISO has suggested that users enable GitLab 2FA as soon as possible.

inst_sel.rs 31.1 KB
Newer Older
1
use ast::ir::*;
2
use ast::ptr::*;
qinsoon's avatar
qinsoon committed
3
use ast::inst::Instruction;
4
5
use ast::inst::Destination;
use ast::inst::DestArg;
qinsoon's avatar
qinsoon committed
6
use ast::inst::Instruction_;
7
use ast::op;
qinsoon's avatar
qinsoon committed
8
use ast::types;
9
use ast::types::MuType_;
10
use vm::context::VMContext;
11
use vm::machine_code::CompiledFunction;
12
13

use compiler::CompilerPass;
qinsoon's avatar
qinsoon committed
14
15
16
use compiler::backend::x86_64;
use compiler::backend::x86_64::CodeGenerator;
use compiler::backend::x86_64::ASMCodeGen;
17

18
19
use std::collections::HashMap;

20
// Compiler pass that lowers Mu IR tree nodes into x86-64 machine code
// via the pluggable CodeGenerator backend.
pub struct InstructionSelection {
    // pass name, reported through CompilerPass::name()
    name: &'static str,
    
    // backend that actually emits the instructions (ASMCodeGen by default)
    backend: Box<CodeGenerator>
}

26
27
28
29
30
31
32
33
34
35
36
37
38
39
impl <'a> InstructionSelection {
    /// Creates the x64 instruction-selection pass backed by the
    /// assembly code generator.
    pub fn new() -> InstructionSelection {
        let backend = Box::new(ASMCodeGen::new());
        
        InstructionSelection {
            name: "Instruction Selection (x64)",
            backend: backend
        }
    }
    
    // in this pass, we assume that
    // 1. all temporaries will use 64bit registers
    // 2. we do not need to backup/restore caller-saved registers
    // 3. we need to backup/restore all the callee-saved registers
    // if any of these assumption breaks, we will need to re-emit the code
    #[allow(unused_variables)]
40
    fn instruction_select(&mut self, node: &'a P<TreeNode>, cur_func: &MuFunctionVersion) {
qinsoon's avatar
qinsoon committed
41
42
43
        trace!("instsel on node {}", node);
        
        match node.v {
44
45
            TreeNode_::Instruction(ref inst) => {
                match inst.v {
qinsoon's avatar
qinsoon committed
46
47
48
                    Instruction_::Branch2{cond, ref true_dest, ref false_dest, true_prob} => {
                        // move this to trace generation
                        // assert here
49
50
51
52
53
                        let (fallthrough_dest, branch_dest, branch_if_true) = {
                            if true_prob > 0.5f32 {
                                (true_dest, false_dest, false)
                            } else {
                                (false_dest, true_dest, true)
54
                            }
55
                        };
56
                        
qinsoon's avatar
qinsoon committed
57
                        let ops = inst.ops.borrow();
58
                        
59
60
                        self.process_dest(&ops, fallthrough_dest, cur_func);
                        self.process_dest(&ops, branch_dest, cur_func);
61
62
63
    
                        let ref cond = ops[cond];
                        
qinsoon's avatar
qinsoon committed
64
65
                        if self.match_cmp_res(cond) {
                            trace!("emit cmp_eq-branch2");
66
                            match self.emit_cmp_res(cond, cur_func) {
qinsoon's avatar
qinsoon committed
67
68
                                op::CmpOp::EQ => self.backend.emit_je(branch_dest),
                                op::CmpOp::NE => self.backend.emit_jne(branch_dest),
qinsoon's avatar
qinsoon committed
69
70
71
72
73
74
75
76
                                op::CmpOp::UGE => self.backend.emit_jae(branch_dest),
                                op::CmpOp::UGT => self.backend.emit_ja(branch_dest),
                                op::CmpOp::ULE => self.backend.emit_jbe(branch_dest),
                                op::CmpOp::ULT => self.backend.emit_jb(branch_dest),
                                op::CmpOp::SGE => self.backend.emit_jge(branch_dest),
                                op::CmpOp::SGT => self.backend.emit_jg(branch_dest),
                                op::CmpOp::SLE => self.backend.emit_jle(branch_dest),
                                op::CmpOp::SLT => self.backend.emit_jl(branch_dest),
qinsoon's avatar
qinsoon committed
77
78
79
80
                                _ => unimplemented!()
                            }
                        } else if self.match_ireg(cond) {
                            trace!("emit ireg-branch2");
81
                            
82
                            let cond_reg = self.emit_ireg(cond, cur_func);
83
                            
qinsoon's avatar
qinsoon committed
84
85
86
87
88
89
                            // emit: cmp cond_reg 1
                            self.backend.emit_cmp_r64_imm32(&cond_reg, 1);
                            // emit: je #branch_dest
                            self.backend.emit_je(branch_dest);                            
                        } else {
                            unimplemented!();
90
                        }
91
92
                    },
                    
qinsoon's avatar
qinsoon committed
93
94
                    Instruction_::Branch1(ref dest) => {
                        let ops = inst.ops.borrow();
95
                                            
96
                        self.process_dest(&ops, dest, cur_func);
97
                        
qinsoon's avatar
qinsoon committed
98
                        trace!("emit branch1");
99
                        // jmp
qinsoon's avatar
qinsoon committed
100
                        self.backend.emit_jmp(dest);
101
102
                    },
                    
qinsoon's avatar
qinsoon committed
103
104
                    Instruction_::ExprCall{ref data, is_abort} => {
                        trace!("deal with pre-call convention");
105
                        
qinsoon's avatar
qinsoon committed
106
                        let ops = inst.ops.borrow();
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
                        let rets = inst.value.as_ref().unwrap();
                        let ref func = ops[data.func];
                        let ref func_sig = match func.v {
                            TreeNode_::Value(ref pv) => {
                                let ty : &MuType_ = &pv.ty;
                                match ty {
                                    &MuType_::FuncRef(ref sig)
                                    | &MuType_::UFuncPtr(ref sig) => sig,
                                    _ => panic!("expected funcref/ptr type")
                                }
                            },
                            _ => panic!("expected funcref/ptr type")
                        };
                        
                        debug_assert!(func_sig.ret_tys.len() == data.args.len());
                        debug_assert!(func_sig.arg_tys.len() == rets.len());
                                                
qinsoon's avatar
qinsoon committed
124
                        let mut gpr_arg_count = 0;
125
                        // TODO: let mut fpr_arg_count = 0;
126
127
128
                        for arg_index in data.args.iter() {
                            let ref arg = ops[*arg_index];
                            trace!("arg {}", arg);
qinsoon's avatar
qinsoon committed
129
130
                            
                            if self.match_ireg(arg) {
131
                                let arg = self.emit_ireg(arg, cur_func);
qinsoon's avatar
qinsoon committed
132
                                
133
134
135
136
137
138
139
                                if gpr_arg_count < x86_64::ARGUMENT_GPRs.len() {
                                    self.backend.emit_mov_r64_r64(&x86_64::ARGUMENT_GPRs[gpr_arg_count], &arg);
                                    gpr_arg_count += 1;
                                } else {
                                    // use stack to pass argument
                                    unimplemented!();
                                }
qinsoon's avatar
qinsoon committed
140
141
142
                            } else if self.match_iimm(arg) {
                                let arg = self.emit_get_iimm(arg);
                                
143
144
145
146
147
148
149
                                if gpr_arg_count < x86_64::ARGUMENT_GPRs.len() {
                                    self.backend.emit_mov_r64_imm32(&x86_64::ARGUMENT_GPRs[gpr_arg_count], arg);
                                    gpr_arg_count += 1;
                                } else {
                                    // use stack to pass argument
                                    unimplemented!();
                                }
qinsoon's avatar
qinsoon committed
150
151
                            } else {
                                unimplemented!();
152
                            }
153
154
                        }
                        
155
156
157
158
159
160
                        // check direct call or indirect
                        if self.match_funcref_const(func) {
                            let target = self.emit_get_funcref_const(func);
                            
                            self.backend.emit_call_near_rel32(target);
                        } else if self.match_ireg(func) {
161
                            let target = self.emit_ireg(func, cur_func);
162
163
164
165
166
167
168
169
170
                            
                            self.backend.emit_call_near_r64(&target);
                        } else if self.match_mem(func) {
                            let target = self.emit_mem(func);
                            
                            self.backend.emit_call_near_mem64(&target);
                        } else {
                            unimplemented!();
                        }
171
                        
qinsoon's avatar
qinsoon committed
172
                        // deal with ret vals
173
                        let mut gpr_ret_count = 0;
174
                        // TODO: let mut fpr_ret_count = 0;
175
176
177
178
179
180
181
182
                        for val in rets {
                            if val.is_int_reg() {
                                if gpr_ret_count < x86_64::RETURN_GPRs.len() {
                                    self.backend.emit_mov_r64_r64(&val, &x86_64::RETURN_GPRs[gpr_ret_count]);
                                    gpr_ret_count += 1;
                                } else {
                                    // get return value by stack
                                    unimplemented!();
183
                                }
184
185
186
                            } else {
                                // floating point register
                                unimplemented!();
187
                            }
188
                        }
189
190
191
                    },
                    
                    Instruction_::Return(_) => {
192
                        self.emit_common_epilogue(inst, cur_func);
193
                        
qinsoon's avatar
qinsoon committed
194
                        self.backend.emit_ret();
195
196
                    },
                    
qinsoon's avatar
qinsoon committed
197
198
199
                    Instruction_::BinOp(op, op1, op2) => {
                        let ops = inst.ops.borrow();
                        
200
201
                        match op {
                            op::BinOp::Add => {
qinsoon's avatar
qinsoon committed
202
203
204
                                if self.match_ireg(&ops[op1]) && self.match_ireg(&ops[op2]) {
                                    trace!("emit add-ireg-ireg");
                                    
205
206
                                    let reg_op1 = self.emit_ireg(&ops[op1], cur_func);
                                    let reg_op2 = self.emit_ireg(&ops[op2], cur_func);
qinsoon's avatar
qinsoon committed
207
208
209
210
211
212
213
214
215
                                    let res_tmp = self.emit_get_result(node);
                                    
                                    // mov op1, res
                                    self.backend.emit_mov_r64_r64(&res_tmp, &reg_op1);
                                    // add op2 res
                                    self.backend.emit_add_r64_r64(&res_tmp, &reg_op2);
                                } else if self.match_ireg(&ops[op1]) && self.match_iimm(&ops[op2]) {
                                    trace!("emit add-ireg-imm");
                                    
216
                                    let reg_op1 = self.emit_ireg(&ops[op1], cur_func);
qinsoon's avatar
qinsoon committed
217
218
219
220
221
222
223
224
225
226
227
228
229
                                    let reg_op2 = self.emit_get_iimm(&ops[op2]);
                                    let res_tmp = self.emit_get_result(node);
                                    
                                    // mov op1, res
                                    self.backend.emit_mov_r64_r64(&res_tmp, &reg_op1);
                                    // add op2, res
                                    self.backend.emit_add_r64_imm32(&res_tmp, reg_op2);
                                } else if self.match_iimm(&ops[op1]) && self.match_ireg(&ops[op2]) {
                                    trace!("emit add-imm-ireg");
                                    unimplemented!();
                                } else if self.match_ireg(&ops[op1]) && self.match_mem(&ops[op2]) {
                                    trace!("emit add-ireg-mem");
                                    
230
                                    let reg_op1 = self.emit_ireg(&ops[op1], cur_func);
qinsoon's avatar
qinsoon committed
231
232
233
234
235
236
237
238
239
240
241
242
243
                                    let reg_op2 = self.emit_mem(&ops[op2]);
                                    let res_tmp = self.emit_get_result(node);
                                    
                                    // mov op1, res
                                    self.backend.emit_mov_r64_r64(&res_tmp, &reg_op1);
                                    // add op2 res
                                    self.backend.emit_add_r64_mem64(&res_tmp, &reg_op2);
                                } else if self.match_mem(&ops[op1]) && self.match_ireg(&ops[op2]) {
                                    trace!("emit add-mem-ireg");
                                    unimplemented!();
                                } else {
                                    unimplemented!()
                                }
244
245
                            },
                            op::BinOp::Sub => {
qinsoon's avatar
qinsoon committed
246
247
248
                                if self.match_ireg(&ops[op1]) && self.match_ireg(&ops[op2]) {
                                    trace!("emit sub-ireg-ireg");
                                    
249
250
                                    let reg_op1 = self.emit_ireg(&ops[op1], cur_func);
                                    let reg_op2 = self.emit_ireg(&ops[op2], cur_func);
qinsoon's avatar
qinsoon committed
251
252
253
254
255
256
257
258
259
                                    let res_tmp = self.emit_get_result(node);
                                    
                                    // mov op1, res
                                    self.backend.emit_mov_r64_r64(&res_tmp, &reg_op1);
                                    // add op2 res
                                    self.backend.emit_sub_r64_r64(&res_tmp, &reg_op2);
                                } else if self.match_ireg(&ops[op1]) && self.match_iimm(&ops[op2]) {
                                    trace!("emit sub-ireg-imm");

260
                                    let reg_op1 = self.emit_ireg(&ops[op1], cur_func);
261
                                    let imm_op2 = self.emit_get_iimm(&ops[op2]);
qinsoon's avatar
qinsoon committed
262
263
264
265
266
                                    let res_tmp = self.emit_get_result(node);
                                    
                                    // mov op1, res
                                    self.backend.emit_mov_r64_r64(&res_tmp, &reg_op1);
                                    // add op2, res
267
                                    self.backend.emit_sub_r64_imm32(&res_tmp, imm_op2);
qinsoon's avatar
qinsoon committed
268
269
270
271
272
273
                                } else if self.match_iimm(&ops[op1]) && self.match_ireg(&ops[op2]) {
                                    trace!("emit sub-imm-ireg");
                                    unimplemented!();
                                } else if self.match_ireg(&ops[op1]) && self.match_mem(&ops[op2]) {
                                    trace!("emit sub-ireg-mem");
                                    
274
                                    let reg_op1 = self.emit_ireg(&ops[op1], cur_func);
275
                                    let mem_op2 = self.emit_mem(&ops[op2]);
qinsoon's avatar
qinsoon committed
276
277
278
279
280
                                    let res_tmp = self.emit_get_result(node);
                                    
                                    // mov op1, res
                                    self.backend.emit_mov_r64_r64(&res_tmp, &reg_op1);
                                    // sub op2 res
281
                                    self.backend.emit_sub_r64_mem64(&res_tmp, &mem_op2);
qinsoon's avatar
qinsoon committed
282
283
284
285
286
287
                                } else if self.match_mem(&ops[op1]) && self.match_ireg(&ops[op2]) {
                                    trace!("emit add-mem-ireg");
                                    unimplemented!();
                                } else {
                                    unimplemented!()
                                }
288
289
                            },
                            op::BinOp::Mul => {
290
291
292
293
                                // mov op1 -> rax
                                let rax = x86_64::RAX.clone();
                                let op1 = &ops[op1];
                                if self.match_ireg(op1) {
294
                                    let reg_op1 = self.emit_ireg(op1, cur_func);
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
                                    
                                    self.backend.emit_mov_r64_r64(&rax, &reg_op1);
                                } else if self.match_iimm(op1) {
                                    let imm_op1 = self.emit_get_iimm(op1);
                                    
                                    self.backend.emit_mov_r64_imm32(&rax, imm_op1);
                                } else if self.match_mem(op1) {
                                    let mem_op1 = self.emit_mem(op1);
                                    
                                    self.backend.emit_mov_r64_mem64(&rax, &mem_op1);
                                } else {
                                    unimplemented!();
                                }
                                
                                // mul op2 -> rax
                                let op2 = &ops[op2];
                                if self.match_ireg(op2) {
312
                                    let reg_op2 = self.emit_ireg(op2, cur_func);
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
                                    
                                    self.backend.emit_mul_r64(&reg_op2);
                                } else if self.match_iimm(op2) {
                                    let imm_op2 = self.emit_get_iimm(op2);
                                    
                                    // put imm in a temporary
                                    // here we use result reg as temporary
                                    let res_tmp = self.emit_get_result(node);
                                    self.backend.emit_mov_r64_imm32(&res_tmp, imm_op2);
                                    
                                    self.backend.emit_mul_r64(&res_tmp);
                                } else if self.match_mem(op2) {
                                    let mem_op2 = self.emit_mem(op2);
                                    
                                    self.backend.emit_mul_mem64(&mem_op2);
                                } else {
                                    unimplemented!();
                                }
                                
                                // mov rax -> result
                                let res_tmp = self.emit_get_result(node);
                                self.backend.emit_mov_r64_r64(&res_tmp, &rax);
335
336
337
                            },
                            
                            _ => unimplemented!()
338
339
                        }
                    }
340
341
342
343
344
345
    
                    _ => unimplemented!()
                } // main switch
            },
            
            TreeNode_::Value(ref p) => {
qinsoon's avatar
qinsoon committed
346

347
348
349
350
351
            }
        }
    }
    
    #[allow(unused_variables)]
352
    fn process_dest(&mut self, ops: &Vec<P<TreeNode>>, dest: &Destination, cur_func: &MuFunctionVersion) {
353
354
        for i in 0..dest.args.len() {
            let ref dest_arg = dest.args[i];
355
356
            match dest_arg {
                &DestArg::Normal(op_index) => {
qinsoon's avatar
qinsoon committed
357
                    let ref arg = ops[op_index];
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
//                    match arg.op {
//                        OpCode::RegI64 
//                        | OpCode::RegFP
//                        | OpCode::IntImmI64
//                        | OpCode::FPImm => {
//                            // do nothing
//                        },
//                        _ => {
//                            trace!("nested: compute arg for branch");
//                            // nested: compute arg
//                            self.instruction_select(arg, cur_func);
//                            
//                            self.emit_get_result(arg);
//                        }
//                    }
//                    
                    let ref target_args = cur_func.content.as_ref().unwrap().get_block(dest.target).content.as_ref().unwrap().args;
                    let ref target_arg = target_args[i];
                    
                    self.emit_general_move(&arg, target_arg, cur_func);
378
379
380
381
                },
                &DestArg::Freshbound(_) => unimplemented!()
            }
        }
qinsoon's avatar
qinsoon committed
382
383
    }
    
384
    fn emit_common_prologue(&mut self, args: &Vec<P<Value>>) {
385
386
387
388
389
390
391
        let block_name = "prologue";
        self.backend.start_block(block_name);
        
        // no livein
        // liveout = entry block's args
        self.backend.set_block_livein(block_name, &vec![]);
        self.backend.set_block_liveout(block_name, args);
qinsoon's avatar
qinsoon committed
392
        
393
394
395
        // push rbp
        self.backend.emit_push_r64(&x86_64::RBP);
        // mov rsp -> rbp
qinsoon's avatar
qinsoon committed
396
        self.backend.emit_mov_r64_r64(&x86_64::RBP, &x86_64::RSP);
397
        
398
        // push all callee-saved registers
399
400
401
402
403
404
        for i in 0..x86_64::CALLEE_SAVED_GPRs.len() {
            let ref reg = x86_64::CALLEE_SAVED_GPRs[i];
            // not pushing rbp (as we have done taht)
            if reg.extract_ssa_id().unwrap() != x86_64::RBP.extract_ssa_id().unwrap() {
                self.backend.emit_push_r64(&reg);
            }
405
406
407
408
        }
        
        // unload arguments
        let mut gpr_arg_count = 0;
409
        // TODO: let mut fpr_arg_count = 0;
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
        for arg in args {
            if arg.is_int_reg() {
                if gpr_arg_count < x86_64::ARGUMENT_GPRs.len() {
                    self.backend.emit_mov_r64_r64(&arg, &x86_64::ARGUMENT_GPRs[gpr_arg_count]);
                    gpr_arg_count += 1;
                } else {
                    // unload from stack
                    unimplemented!();
                }
            } else if arg.is_fp_reg() {
                unimplemented!();
            } else {
                panic!("expect an arg value to be either int reg or fp reg");
            }
        }
425
426
        
        self.backend.end_block(block_name);
427
428
    }
    
429
    fn emit_common_epilogue(&mut self, ret_inst: &Instruction, cur_func: &MuFunctionVersion) {
430
431
        // epilogue is not a block (its a few instruction inserted before return)
        // FIXME: this may change in the future
432
        
433
        // prepare return regs
434
435
436
437
438
439
440
        let ref ops = ret_inst.ops.borrow();
        let ret_val_indices = match ret_inst.v {
            Instruction_::Return(ref vals) => vals,
            _ => panic!("expected ret inst")
        };
        
        let mut gpr_ret_count = 0;
441
        // TODO: let mut fpr_ret_count = 0;
442
443
444
        for i in ret_val_indices {
            let ref ret_val = ops[*i];
            if self.match_ireg(ret_val) {
445
                let reg_ret_val = self.emit_ireg(ret_val, cur_func);
446
447
448
449
450
451
452
453
454
455
456
                
                self.backend.emit_mov_r64_r64(&x86_64::RETURN_GPRs[gpr_ret_count], &reg_ret_val);
                gpr_ret_count += 1;
            } else if self.match_iimm(ret_val) {
                let imm_ret_val = self.emit_get_iimm(ret_val);
                
                self.backend.emit_mov_r64_imm32(&x86_64::RETURN_GPRs[gpr_ret_count], imm_ret_val);
                gpr_ret_count += 1;
            } else {
                unimplemented!();
            }
457
458
459
460
461
462
463
464
        }        
        
        // pop all callee-saved registers - reverse order
        for i in (0..x86_64::CALLEE_SAVED_GPRs.len()).rev() {
            let ref reg = x86_64::CALLEE_SAVED_GPRs[i];
            if reg.extract_ssa_id().unwrap() != x86_64::RBP.extract_ssa_id().unwrap() {
                self.backend.emit_pop_r64(&reg);
            }
465
        }
466
467
468
        
        // pop rbp
        self.backend.emit_pop_r64(&x86_64::RBP);
469
470
    }
    
qinsoon's avatar
qinsoon committed
471
472
473
474
475
476
477
478
479
480
481
482
    /// Returns true when the operand is a CmpOp instruction whose result we
    /// can fold directly into a conditional jump.
    fn match_cmp_res(&mut self, op: &P<TreeNode>) -> bool {
        if let TreeNode_::Instruction(ref inst) = op.v {
            if let Instruction_::CmpOp(_, _, _) = inst.v {
                return true;
            }
        }
        false
    }
    
483
    // Emits the x86 'cmp' for a CmpOp node and returns the comparison
    // operator so the caller can pick the matching conditional jump.
    // Only integer compares with (reg, reg) or (reg, imm32) operands are
    // currently handled.
    fn emit_cmp_res(&mut self, cond: &P<TreeNode>, cur_func: &MuFunctionVersion) -> op::CmpOp {
        match cond.v {
            TreeNode_::Instruction(ref inst) => {
                let ops = inst.ops.borrow();
                
                match inst.v {
                    Instruction_::CmpOp(op, op1, op2) => {
                        let op1 = &ops[op1];
                        let op2 = &ops[op2];
                        
                        if op::is_int_cmp(op) {
                            if self.match_ireg(op1) && self.match_ireg(op2) {
                                // cmp reg, reg
                                let reg_op1 = self.emit_ireg(op1, cur_func);
                                let reg_op2 = self.emit_ireg(op2, cur_func);
                                
                                self.backend.emit_cmp_r64_r64(&reg_op1, &reg_op2);
                            } else if self.match_ireg(op1) && self.match_iimm(op2) {
                                // cmp reg, imm32
                                let reg_op1 = self.emit_ireg(op1, cur_func);
                                let iimm_op2 = self.emit_get_iimm(op2);
                                
                                self.backend.emit_cmp_r64_imm32(&reg_op1, iimm_op2);
                            } else {
                                // imm-on-the-left / memory operands not handled yet
                                unimplemented!()
                            }
                        } else {
                            // floating point compares not handled yet
                            unimplemented!()
                        }
                        
                        // hand the operator back for jcc selection
                        op
                    }
                    
                    _ => panic!("expect cmp res to emit")
                }
            }
            _ => panic!("expect cmp res to emit")
        }
    }
    
    fn match_ireg(&mut self, op: &P<TreeNode>) -> bool {
        match op.v {
            TreeNode_::Instruction(ref inst) => {
                if inst.value.is_some() {
                    if inst.value.as_ref().unwrap().len() > 1 {
                        return false;
                    }
                    
                    let ref value = inst.value.as_ref().unwrap()[0];
                    
                    if types::is_scalar(&value.ty) {
                        true
                    } else {
                        false
                    }
                } else {
                    false
                }
            }
            
            TreeNode_::Value(ref pv) => {
                pv.is_int_reg()
            }
        }
    }
    
547
    // Returns a value (SSA temporary) holding the operand in an integer
    // register, recursively selecting instructions for a nested subtree
    // first if needed. Callers must check match_ireg() beforehand.
    fn emit_ireg(&mut self, op: &P<TreeNode>, cur_func: &MuFunctionVersion) -> P<Value> {
        match op.v {
            TreeNode_::Instruction(_) => {
                // nested expression: emit its code, then use its result
                self.instruction_select(op, cur_func);
                
                self.emit_get_result(op)
            },
            TreeNode_::Value(ref pv) => {
                match pv.v {
                    // constants are not registers; use match_iimm/emit_get_iimm
                    Value_::Constant(_) => panic!("expected ireg"),
                    Value_::SSAVar(_) => {
                        pv.clone()
                    }
                }
            }
        }
    }
    
565
    #[allow(unused_variables)]
    // TODO: floating point register operands are not supported yet;
    // NOTE(review): this panics (unimplemented!) if ever reached
    fn match_fpreg(&mut self, op: &P<TreeNode>) -> bool {
        unimplemented!()
    }
    
qinsoon's avatar
qinsoon committed
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
    /// Returns true when the operand is a value that fits in an x86
    /// 32-bit immediate.
    fn match_iimm(&mut self, op: &P<TreeNode>) -> bool {
        if let TreeNode_::Value(ref pv) = op.v {
            x86_64::is_valid_x86_imm(pv)
        } else {
            false
        }
    }
    
    /// Extracts an integer-constant operand as a 32-bit immediate.
    /// Panics when the node is not an integer constant (callers must
    /// check match_iimm() first).
    fn emit_get_iimm(&mut self, op: &P<TreeNode>) -> u32 {
        if let TreeNode_::Value(ref pv) = op.v {
            if let Value_::Constant(Constant::Int(val)) = pv.v {
                return val as u32;
            }
        }
        panic!("expected iimm")
    }
    
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
611
612
613
614
615
616
    /// Returns true when the operand is a (u)funcref constant, i.e. a
    /// statically known call target.
    fn match_funcref_const(&mut self, op: &P<TreeNode>) -> bool {
        match op.v {
            TreeNode_::Value(ref pv) => {
                match pv.v {
                    Value_::Constant(Constant::FuncRef(_))
                    | Value_::Constant(Constant::UFuncRef(_)) => true,
                    _ => false
                }
            },
            _ => false
        }
    }
    
    /// Extracts the function tag from a (u)funcref constant operand.
    /// Panics otherwise (callers must check match_funcref_const() first).
    fn emit_get_funcref_const(&mut self, op: &P<TreeNode>) -> MuTag {
        if let TreeNode_::Value(ref pv) = op.v {
            if let Value_::Constant(ref c) = pv.v {
                match c {
                    &Constant::FuncRef(tag) | &Constant::UFuncRef(tag) => return tag,
                    _ => {}
                }
            }
        }
        panic!("expected a (u)funcref const")
    }
    
617
    #[allow(unused_variables)]
    // TODO: memory operands are not supported yet; NOTE(review): several
    // call sites (BinOp, ExprCall) reach this, so those paths currently
    // panic (unimplemented!) when reg/imm matching fails
    fn match_mem(&mut self, op: &P<TreeNode>) -> bool {
        unimplemented!()
    }
    
622
    #[allow(unused_variables)]
    // TODO: emit a memory-operand value; unreachable until match_mem is
    // implemented
    fn emit_mem(&mut self, op: &P<TreeNode>) -> P<Value> {
        unimplemented!()
    }
    
qinsoon's avatar
qinsoon committed
627
628
629
630
631
632
633
634
635
636
637
638
639
640
641
642
643
644
645
646
    fn emit_get_result(&mut self, node: &P<TreeNode>) -> P<Value> {
        match node.v {
            TreeNode_::Instruction(ref inst) => {
                if inst.value.is_some() {
                    if inst.value.as_ref().unwrap().len() > 1 {
                        panic!("expected ONE result from the node {}", node);
                    }
                    
                    let ref value = inst.value.as_ref().unwrap()[0];
                    
                    value.clone()
                } else {
                    panic!("expected result from the node {}", node);
                }
            }
            
            TreeNode_::Value(ref pv) => {
                pv.clone()
            }
        }
647
648
    }
    
649
    // Moves a branch-argument source (register or immediate) into the
    // destination value, dispatching on the destination's type.
    fn emit_general_move(&mut self, src: &P<TreeNode>, dest: &P<Value>, cur_func: &MuFunctionVersion) {
        let ref dst_ty = dest.ty;
        
        if !types::is_fp(dst_ty) && types::is_scalar(dst_ty) {
            // integer scalar: register-to-register move or immediate load
            if self.match_ireg(src) {
                let src_reg = self.emit_ireg(src, cur_func);
                self.backend.emit_mov_r64_r64(dest, &src_reg);
            } else if self.match_iimm(src) {
                let src_imm = self.emit_get_iimm(src);
                self.backend.emit_mov_r64_imm32(dest, src_imm);
            } else {
                panic!("expected an int type op");
            }
        } else if types::is_fp(dst_ty) && types::is_scalar(dst_ty) {
            // BUGFIX: this branch previously repeated the integer condition
            // (!is_fp && is_scalar), which made it unreachable; it is meant
            // to handle floating point scalar moves (not implemented yet)
            unimplemented!()
        } else {
            panic!("unexpected type for move");
        }
    }
668
}
669

670
671
672
impl CompilerPass for InstructionSelection {
    fn name(&self) -> &'static str {
        self.name
673
    }
674

675
    #[allow(unused_variables)]
676
    fn start_function(&mut self, vm_context: &VMContext, func: &mut MuFunctionVersion) {
677
        debug!("{}", self.name());
qinsoon's avatar
qinsoon committed
678
        
qinsoon's avatar
qinsoon committed
679
680
681
        self.backend.start_code(func.fn_name);
        
        // prologue (get arguments from entry block first)        
682
683
684
        let entry_block = func.content.as_ref().unwrap().get_entry_block();
        let ref args = entry_block.content.as_ref().unwrap().args;
        self.emit_common_prologue(args);
685
686
687
    }

    #[allow(unused_variables)]
688
    fn visit_function(&mut self, vm_context: &VMContext, func: &mut MuFunctionVersion) {
689
        for block_label in func.block_trace.as_ref().unwrap() {
690
            let block = func.content.as_ref().unwrap().get_block(block_label);
qinsoon's avatar
qinsoon committed
691
692
            
            self.backend.start_block(block.label);
693

694
            let block_content = block.content.as_ref().unwrap();
695
696
697
698
699
700
701
            
            // live in is args of the block
            self.backend.set_block_livein(block.label, &block_content.args);
            
            // live out is the union of all branch args of this block
            let live_out = block_content.get_out_arguments();
            self.backend.set_block_liveout(block.label, &live_out);
702

703
704
            for inst in block_content.body.iter() {
                self.instruction_select(inst, func);
705
            }
706
707
            
            self.backend.end_block(block.label);
708
709
        }
    }
qinsoon's avatar
qinsoon committed
710
711
    
    #[allow(unused_variables)]
712
    fn finish_function(&mut self, vm_context: &VMContext, func: &mut MuFunctionVersion) {
qinsoon's avatar
qinsoon committed
713
714
        self.backend.print_cur_code();
        
715
716
717
        let mc = self.backend.finish_code();
        let compiled_func = CompiledFunction {
            fn_name: func.fn_name,
718
            temps: HashMap::new(),
719
720
721
722
            mc: mc
        };
        
        vm_context.add_compiled_func(compiled_func);
qinsoon's avatar
qinsoon committed
723
    }
724
}