// asm_backend.rs
#![allow(unused_variables)]

use compiler::backend;
use compiler::backend::AOT_EMIT_CONTEXT_FILE;
use compiler::backend::AOT_EMIT_DIR;
use compiler::backend::RegGroup;
use utils::ByteSize;
use compiler::backend::x86_64;
use compiler::backend::x86_64::CodeGenerator;
use compiler::machine_code::MachineCode;
use vm::VM;
use runtime::ValueLocation;

use utils::vec_utils;
use utils::string_utils;

use ast::ptr::P;
use ast::ir::*;

use std::collections::HashMap;
use std::str;
use std::usize;
use std::slice::Iter;
use std::ops;

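/// Machine code for one function: the emitted assembly lines plus the
/// bookkeeping (register defines/uses, branch targets, block boundaries,
/// live-in/live-out sets) that later passes such as register allocation need.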
struct ASMCode {
    name: MuName,
    code: Vec<ASM>,
    reg_defines: HashMap<MuID, Vec<ASMLocation>>,
    reg_uses: HashMap<MuID, Vec<ASMLocation>>,
    
    mem_op_used: HashMap<usize, bool>,
    
    preds: Vec<Vec<usize>>,
    succs: Vec<Vec<usize>>,
    
    idx_to_blk: HashMap<usize, MuName>,
    blk_to_idx: HashMap<MuName, usize>,
    cond_branches: HashMap<usize, MuName>,
    branches: HashMap<usize, MuName>,
    
    blocks: Vec<MuName>,
    block_start: HashMap<MuName, usize>,
    block_range: HashMap<MuName, ops::Range<usize>>,
    
    block_livein: HashMap<MuName, Vec<MuID>>,
    block_liveout: HashMap<MuName, Vec<MuID>>
}

unsafe impl Send for ASMCode {}
unsafe impl Sync for ASMCode {}

impl MachineCode for ASMCode {
    fn number_of_insts(&self) -> usize {
        self.code.len()
    }
    
    fn is_move(&self, index: usize) -> bool {
        let inst = self.code.get(index);
        match inst {
            Some(inst) => inst.code.starts_with("mov"),
            None => false
        }
    }
    
    fn is_using_mem_op(&self, index: usize) -> bool {
        *self.mem_op_used.get(&index).unwrap()
    }
    
    fn get_succs(&self, index: usize) -> &Vec<usize> {
        &self.succs[index]
    }
    
    fn get_preds(&self, index: usize) -> &Vec<usize> {
        &self.preds[index]
    }
    
    fn get_inst_reg_uses(&self, index: usize) -> &Vec<MuID> {
        &self.code[index].uses
    }
    
    fn get_inst_reg_defines(&self, index: usize) -> &Vec<MuID> {
        &self.code[index].defines
    }
    
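    /// patches every recorded define/use location of register `from` in the
    /// emitted assembly text so that it names machine register `to` instead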
    fn replace_reg(&mut self, from: MuID, to: MuID) {
        let to_reg_tag : MuName = backend::all_regs().get(&to).unwrap().name().unwrap();
        let to_reg_string = "%".to_string() + &to_reg_tag;
        
        match self.reg_defines.get(&from) {
            Some(defines) => {
                for loc in defines {
                    let ref mut inst_to_patch = self.code[loc.line];
                    for i in 0..loc.len {
                        string_utils::replace(&mut inst_to_patch.code, loc.index, &to_reg_string, to_reg_string.len());
                    }
                }
            },
            None => {}
        }
        
        match self.reg_uses.get(&from) {
            Some(uses) => {
                for loc in uses {
                    let ref mut inst_to_patch = self.code[loc.line];
                    for i in 0..loc.len {
                        string_utils::replace(&mut inst_to_patch.code, loc.index, &to_reg_string, to_reg_string.len());
                    }   
                }
            },
            None => {}
        }
    }
    
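    /// replaces the instruction at `index` with an empty line and drops its
    /// recorded register defines/uses so later passes treat it as a no-op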
    fn set_inst_nop(&mut self, index: usize) {
        // FIXME: changing this info is inefficient - plus we probably do not need to

        // remove any reg use of this instruction
        // clone the vec otherwise since we need to borrow 'self' again
        for reg in self.get_inst_reg_uses(index).to_vec() {
            let mut locs = self.reg_uses.get_mut(&reg).unwrap();
            let mut new_locs : Vec<ASMLocation> = vec![];

            while !locs.is_empty() {
                let loc = locs.pop().unwrap();
                if loc.line != index {
                    new_locs.push(loc);
                }
            }

            debug_assert!(locs.is_empty());
            locs.append(&mut new_locs);
        }

        // remove any reg define of this instruction
        for reg in self.get_inst_reg_defines(index).to_vec() {
            let mut locs = self.reg_defines.get_mut(&reg).unwrap();
            let mut new_locs : Vec<ASMLocation> = vec![];

            while !locs.is_empty() {
                let loc = locs.pop().unwrap();
                if loc.line != index {
                    new_locs.push(loc);
                }
            }

            debug_assert!(locs.is_empty());
            locs.append(&mut new_locs);
        }

        // nop doesn't use a mem op
        self.mem_op_used.insert(index, false);

        self.code.remove(index);
        self.code.insert(index, ASM::nop());
    }
    
    fn emit(&self) -> Vec<u8> {
        let mut ret = vec![];
        
        for inst in self.code.iter() {
            ret.append(&mut inst.code.clone().into_bytes());
            ret.append(&mut "\n".to_string().into_bytes());
        }
        
        ret
    }
    
    fn trace_mc(&self) {
        trace!("");

        trace!("code for {}: \n", self.name);
        
        let n_insts = self.code.len();
        for i in 0..n_insts {
            self.trace_inst(i);
        }
        
        trace!("")      
    }
    
    fn trace_inst(&self, i: usize) {
        trace!("#{}\t{:30}\t\tdefine: {:?}\tuses: {:?}\tpred: {:?}\tsucc: {:?}", 
            i, self.code[i].code, self.get_inst_reg_defines(i), self.get_inst_reg_uses(i),
            self.preds[i], self.succs[i]);
    }
    
    fn get_ir_block_livein(&self, block: &str) -> Option<&Vec<MuID>> {
        self.block_livein.get(&block.to_string())
    }
    
    fn get_ir_block_liveout(&self, block: &str) -> Option<&Vec<MuID>> {
        self.block_liveout.get(&block.to_string())
    }
    
    fn set_ir_block_livein(&mut self, block: &str, set: Vec<MuID>) {
        self.block_livein.insert(block.to_string(), set);
    }
    
    fn set_ir_block_liveout(&mut self, block: &str, set: Vec<MuID>) {
        self.block_liveout.insert(block.to_string(), set);
    }
    
    fn get_all_blocks(&self) -> &Vec<MuName> {
        &self.blocks
    }
    
    fn get_block_range(&self, block: &str) -> Option<ops::Range<usize>> {
        match self.block_range.get(&block.to_string()) {
            Some(r) => Some(r.clone()),
            None => None
        }
    }
}

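/// one emitted line of assembly together with the IDs of the registers it
/// defines and uses (both empty for purely symbolic lines such as labels)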
struct ASM {
    code: String,
    defines: Vec<MuID>,
    uses: Vec<MuID>
}

impl ASM {
    fn symbolic(line: String) -> ASM {
        ASM {
            code: line,
            defines: vec![],
            uses: vec![]
        }
    }
    
    fn inst(inst: String, defines: Vec<MuID>, uses: Vec<MuID>) -> ASM {
        ASM {
            code: inst,
            defines: defines,
            uses: uses
        }
    }
    
    fn branch(line: String) -> ASM {
        ASM {
            code: line,
            defines: vec![],
            uses: vec![]
        }
    }
    
    fn nop() -> ASM {
        ASM {
            code: "".to_string(),
            defines: vec![],
            uses: vec![]
        }
    }
}

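/// where a register operand sits in the emitted text: the instruction line,
/// the column at which the operand starts, and its length in characters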
#[derive(Clone, Debug)]
struct ASMLocation {
    line: usize,
    index: usize,
    len: usize
}

impl ASMLocation {
    /// the 'line' field will be updated later
    fn new(index: usize, len: usize) -> ASMLocation {
        ASMLocation{
            line: usize::MAX,
            index: index,
            len: len
        }
    }
}

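/// emits x86_64 assembly (AT&T syntax) one function at a time; `cur` holds
/// the ASMCode being built between start_code() and finish_code()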
pub struct ASMCodeGen {
    cur: Option<Box<ASMCode>>
}

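// virtual registers are emitted as a fixed-width placeholder ("%" + spaces)
// so that replace_reg() can later overwrite them in place with the allocated
// machine register without shifting the recorded operand columns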
const REG_PLACEHOLDER_LEN : usize = 5;
lazy_static! {
    pub static ref REG_PLACEHOLDER : String = {
        let blank_spaces = [' ' as u8; REG_PLACEHOLDER_LEN];
        
        format!("%{}", str::from_utf8(&blank_spaces).unwrap())
    };
}

impl ASMCodeGen {
    pub fn new() -> ASMCodeGen {
        ASMCodeGen {
            cur: None
        }
    }
    
    fn cur(&self) -> &ASMCode {
        self.cur.as_ref().unwrap()
    }
    
    fn cur_mut(&mut self) -> &mut ASMCode {
        self.cur.as_mut().unwrap()
    }
    
    fn line(&self) -> usize {
        self.cur().code.len()
    }
    
    fn add_asm_label(&mut self, code: String) {
        let l = self.line();
        self.cur_mut().code.push(ASM::symbolic(code));
    }
    
    fn add_asm_block_label(&mut self, code: String, block_name: MuName) {
        let l = self.line();
        self.cur_mut().code.push(ASM::symbolic(code));
        
        self.cur_mut().idx_to_blk.insert(l, block_name.clone());
        self.cur_mut().blk_to_idx.insert(block_name, l);
    }
    
    fn add_asm_symbolic(&mut self, code: String){
        self.cur_mut().code.push(ASM::symbolic(code));
    }
    
    fn prepare_machine_regs(&self, regs: Iter<P<Value>>) -> Vec<MuID> {
        regs.map(|x| self.prepare_machine_reg(x)).collect()
    }
    
    fn add_asm_call(&mut self, code: String) {
        // a call instruction will use all the argument registers
        let mut uses : Vec<MuID> = self.prepare_machine_regs(x86_64::ARGUMENT_GPRs.iter());
        uses.append(&mut self.prepare_machine_regs(x86_64::ARGUMENT_FPRs.iter()));

        // defines: return registers
        let mut defines : Vec<MuID> = self.prepare_machine_regs(x86_64::RETURN_GPRs.iter());
        defines.append(&mut self.prepare_machine_regs(x86_64::RETURN_FPRs.iter()));
        // defines: caller saved registers
        vec_utils::append_unique(&mut defines, &mut self.prepare_machine_regs(x86_64::CALLER_SAVED_GPRs.iter()));
        vec_utils::append_unique(&mut defines, &mut self.prepare_machine_regs(x86_64::CALLER_SAVED_FPRs.iter()));
        
        self.add_asm_inst(code, defines, vec![], uses, vec![], false);
    }
    
    fn add_asm_ret(&mut self, code: String) {
        let mut uses : Vec<MuID> = self.prepare_machine_regs(x86_64::RETURN_GPRs.iter());
        uses.append(&mut self.prepare_machine_regs(x86_64::RETURN_FPRs.iter()));
        
        self.add_asm_inst(code, vec![], vec![], uses, vec![], false);
    }
    
    fn add_asm_branch(&mut self, code: String, target: MuName) {
        let l = self.line();
        self.cur_mut().branches.insert(l, target);
        
        self.add_asm_inst(code, vec![], vec![], vec![], vec![], false);
    }
    
    fn add_asm_branch2(&mut self, code: String, target: MuName) {
        let l = self.line();
        self.cur_mut().cond_branches.insert(l, target);
        
        self.add_asm_inst(code, vec![], vec![], vec![], vec![], false);
    }
    
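    /// pushes one instruction line and records, for each define and use, the
    /// exact text location of its operand so it can be patched later;
    /// `is_using_mem_op` marks instructions that reference a memory operand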
    fn add_asm_inst(
        &mut self, 
        code: String, 
        defines: Vec<MuID>,
        mut define_locs: Vec<ASMLocation>,
        uses: Vec<MuID>,
        mut use_locs: Vec<ASMLocation>,
        is_using_mem_op: bool)
    {
        let line = self.line();
        
        trace!("asm: {}", code);
        trace!("     defines: {:?}, def_locs: {:?}", defines, define_locs);
        trace!("     uses: {:?}, use_locs: {:?}", uses, use_locs);
        let mc = self.cur_mut();
       
        // add locations of defined registers
        for i in 0..define_locs.len() {
            let id = defines[i];
            
            // update line in location
            let ref mut loc = define_locs[i];
            loc.line = line;
            
            if mc.reg_defines.contains_key(&id) {
                mc.reg_defines.get_mut(&id).unwrap().push(loc.clone());
            } else {
                mc.reg_defines.insert(id, vec![loc.clone()]);
            }
        }
       
        for i in 0..use_locs.len() {
            let id = uses[i];
            
            // update line in location
            let ref mut loc = use_locs[i];
            loc.line = line;
            
            if mc.reg_uses.contains_key(&id) {
                mc.reg_uses.get_mut(&id).unwrap().push(loc.clone());
            } else {
                mc.reg_uses.insert(id, vec![loc.clone()]);
            }
        }
        
        // put the instruction
        mc.code.push(ASM::inst(code, defines, uses));
        mc.mem_op_used.insert(line, is_using_mem_op);
    }
    
    fn define_reg(&mut self, reg: &P<Value>, loc: ASMLocation) {
        let id = reg.extract_ssa_id().unwrap();
        
        let code = self.cur_mut();
        if code.reg_defines.contains_key(&id) {
            let regs = code.reg_defines.get_mut(&id).unwrap();
            regs.push(loc);
        } else {
            code.reg_defines.insert(id, vec![loc]);
        } 
    }
    
    fn use_reg(&mut self, reg: &P<Value>, loc: ASMLocation) {
        let id = reg.extract_ssa_id().unwrap();
        
        let code = self.cur_mut();
        if code.reg_uses.contains_key(&id) {
            let reg_uses = code.reg_uses.get_mut(&id).unwrap();
            reg_uses.push(loc);
        } else {
            code.reg_uses.insert(id, vec![loc]);
        } 
    }
    
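    /// returns the operand text for a register (a placeholder if it is still
    /// a virtual register), its SSA id, and an ASMLocation starting at column
    /// `loc` of the instruction being built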
    fn prepare_reg(&self, op: &P<Value>, loc: usize) -> (String, MuID, ASMLocation) {
        if cfg!(debug_assertions) {
            match op.v {
                Value_::SSAVar(_) => {},
                _ => panic!("expecting register op")
            }
        }
        
        let str = self.asm_reg_op(op);
        let len = str.len();
        (str, op.extract_ssa_id().unwrap(), ASMLocation::new(loc, len)) 
    }
    
    fn prepare_machine_reg(&self, op: &P<Value>) -> MuID {
        if cfg!(debug_assertions) {
            match op.v {
                Value_::SSAVar(_) => {},
                _ => panic!("expecting machine register op")
            }
        }        
        
        op.extract_ssa_id().unwrap()
    }
    
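    /// builds the AT&T-syntax memory operand for a Value_::Memory value,
    /// e.g. "8(%rax,%rbx,2)" for an address or "label(%reg)" for a symbolic
    /// location, returning the text plus the ids and locations of every
    /// register appearing in it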
    #[allow(unused_assignments)]
    fn prepare_mem(&self, op: &P<Value>, loc: usize) -> (String, Vec<MuID>, Vec<ASMLocation>) {
        if cfg!(debug_assertions) {
            match op.v {
                Value_::Memory(_) => {},
                _ => panic!("expecting memory op")
            }
        }        
        
        let mut ids : Vec<MuID> = vec![];
        let mut locs : Vec<ASMLocation> = vec![];
        let mut result_str : String = "".to_string();
        
        let mut loc_cursor : usize = loc;
        
        match op.v {
            // offset(base,index,scale)
            Value_::Memory(MemoryLocation::Address{ref base, ref offset, ref index, scale}) => {
                // deal with offset
                if offset.is_some() {
                    let offset = offset.as_ref().unwrap();
                    
                    match offset.v {
                        Value_::SSAVar(id) => {
                            // temp as offset
                            let (str, id, loc) = self.prepare_reg(offset, loc_cursor);
                            
                            result_str.push_str(&str);
                            ids.push(id);
                            locs.push(loc);
                            
                            loc_cursor += str.len();
                        },
                        Value_::Constant(Constant::Int(val)) => {
                            let str = val.to_string();
                            
                            result_str.push_str(&str);
                            loc_cursor += str.len();
                        },
                        _ => panic!("unexpected offset type: {:?}", offset)
                    }
                }
                
                result_str.push('(');
                loc_cursor += 1; 
                
                // deal with base, base is ssa
                let (str, id, loc) = self.prepare_reg(base, loc_cursor);
                result_str.push_str(&str);
                ids.push(id);
                locs.push(loc);
                loc_cursor += str.len();
                
                // deal with index (ssa or constant)
                if index.is_some() {
                    result_str.push(',');
                    loc_cursor += 1; // plus 1 for ,                    
                    
                    let index = index.as_ref().unwrap();
                    
                    match index.v {
                        Value_::SSAVar(id) => {
                            // temp as offset
                            let (str, id, loc) = self.prepare_reg(index, loc_cursor);
                            
                            result_str.push_str(&str);
                            ids.push(id);
                            locs.push(loc);
                            
                            loc_cursor += str.len();
                        },
                        Value_::Constant(Constant::Int(val)) => {
                            let str = val.to_string();
                            
                            result_str.push_str(&str);
                            loc_cursor += str.len();
                        },
                        _ => panic!("unexpected index type: {:?}", index)
                    }
                    
                    // scale
                    if scale.is_some() {
                        result_str.push(',');
                        loc_cursor += 1;
                        
                        let scale = scale.unwrap();
                        let str = scale.to_string();
                        
                        result_str.push_str(&str);
                        loc_cursor += str.len();
                    }
                }
                
                result_str.push(')');
                loc_cursor += 1;
            },
            Value_::Memory(MemoryLocation::Symbolic{ref base, ref label}) => {
                result_str.push_str(&symbol(label.clone()));
                loc_cursor += label.len();
                
                if base.is_some() {
                    result_str.push('(');
                    loc_cursor += 1;
                    
                    let (str, id, loc) = self.prepare_reg(base.as_ref().unwrap(), loc_cursor);
                    result_str.push_str(&str);
                    ids.push(id);
                    locs.push(loc);
                    loc_cursor += str.len();
                    
                    result_str.push(')');
                    loc_cursor += 1;                    
                }
            },
            _ => panic!("expect mem location as value")
        }
        
        (result_str, ids, locs)
    }
    
    fn asm_reg_op(&self, op: &P<Value>) -> String {
        let id = op.extract_ssa_id().unwrap();
        if id < MACHINE_ID_END {
            // machine reg
            format!("%{}", op.name().unwrap())
        } else {
            // virtual register, use place holder
            REG_PLACEHOLDER.clone()
        }
    }
    
    fn asm_block_label(&self, label: MuName) -> String {
        symbol(format!("{}_{}", self.cur().name, label))
    }
    
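    /// fills in per-instruction preds/succs: an ordinary instruction falls
    /// through to the next line, while recorded branches and conditional
    /// branches are also linked to the index of their target block label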
    fn control_flow_analysis(&mut self) {
        // control flow analysis
        let n_insts = self.line();
        
        let code = self.cur_mut();
        code.preds = vec![vec![]; n_insts];
        code.succs = vec![vec![]; n_insts];
        
        for i in 0..n_insts {
            // determine predecessor - if cur is not block start, its predecessor is previous insts
            let is_block_start = code.idx_to_blk.get(&i);
            if is_block_start.is_none() {
                if i > 0 {
                    trace!("inst {}: not a block start", i);
                    trace!("inst {}: set PREDS as previous inst {}", i, i-1);
                    code.preds[i].push(i - 1);
                }
            } else {
                // if cur is a branch target, we already set its predecessor
                // if cur is a fall-through block, we set it in a sanity check pass
            }
            
            // determine successor
            let is_branch = code.branches.get(&i);
            if is_branch.is_some() {
                // branch to target
                let target = is_branch.unwrap();
                trace!("inst {}: is a branch to {}", i, target);
                
                let target_n = code.blk_to_idx.get(target).unwrap();
                trace!("inst {}: branch target index is {}", i, target_n);
                
                // cur inst's succ is target
                trace!("inst {}: set SUCCS as branch target {}", i, target_n);
                code.succs[i].push(*target_n);
                
                // target's pred is cur
                trace!("inst {}: set PREDS as branch source {}", target_n, i);
                code.preds[*target_n].push(i);
            } else {
                let is_cond_branch = code.cond_branches.get(&i);
                if is_cond_branch.is_some() {
                    // branch to target
                    let target = is_cond_branch.unwrap();
                    trace!("inst {}: is a cond branch to {}", i, target);
                    
                    let target_n = code.blk_to_idx.get(target).unwrap();
                    trace!("inst {}: branch target index is {}", i, target_n);
                    
                    // cur inst's succ is target and next inst
                    code.succs[i].push(*target_n);
                    trace!("inst {}: set SUCCS as branch target {}", i, target_n);
                    if i < n_insts - 1 {
                        trace!("inst {}: set SUCCS as next inst {}", i, i + 1);
                        code.succs[i].push(i + 1);
                    }
                    
                    // target's pred is cur
                    code.preds[*target_n].push(i);
                    trace!("inst {}: set PREDS as {}", *target_n, i);
                } else {
                    // not branch nor cond branch, succ is next inst
                    trace!("inst {}: not a branch inst", i);
                    if i < n_insts - 1 {
                        trace!("inst {}: set SUCCS as next inst {}", i, i + 1);
                        code.succs[i].push(i + 1);
                    }
                }
            } 
        }
        
        // a sanity check for fallthrough blocks
        for i in 0..n_insts {
            if i != 0 && code.preds[i].len() == 0 {
                code.preds[i].push(i - 1);
            }
        }        
    }
}

impl CodeGenerator for ASMCodeGen {
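    /// begins a new ASMCode for `func_name`, emitting the global symbol and
    /// label for the function entry (so the output can be linked against C
    /// code), and returns a relocatable location for it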
    fn start_code(&mut self, func_name: MuName) -> ValueLocation {
        self.cur = Some(Box::new(ASMCode {
                name: func_name.clone(),
                code: vec![],
                reg_defines: HashMap::new(),
                reg_uses: HashMap::new(),
                
                mem_op_used: HashMap::new(),
                
                preds: vec![],
                succs: vec![],
                
                idx_to_blk: HashMap::new(),
                blk_to_idx: HashMap::new(),
                cond_branches: HashMap::new(),
                branches: HashMap::new(),
                
                blocks: vec![],
                block_start: HashMap::new(),
                block_range: HashMap::new(),
                
                block_livein: HashMap::new(),
                block_liveout: HashMap::new()
            }));
        
        // to link with C sources via gcc
        let func_symbol = symbol(func_name.clone());
        self.add_asm_symbolic(directive_globl(func_symbol.clone()));
        self.add_asm_symbolic(format!("{}:", func_symbol.clone()));
        
        ValueLocation::Relocatable(RegGroup::GPR, func_name)
    }
    
    fn finish_code(&mut self, func_name: MuName) -> (Box<MachineCode + Sync + Send>, ValueLocation) {
        let func_end = {
            let mut symbol = func_name.clone();
            symbol.push_str("_end");
            symbol
        };
        self.add_asm_symbolic(directive_globl(symbol(func_end.clone())));
        self.add_asm_symbolic(format!("{}:", symbol(func_end.clone())));
        
        self.control_flow_analysis();
        
        (
            self.cur.take().unwrap(),
            ValueLocation::Relocatable(RegGroup::GPR, func_end)
        )
    }
    
    fn print_cur_code(&self) {
        println!("");
        
        if self.cur.is_some() {
            let code = self.cur.as_ref().unwrap();
            
            println!("code for {}: ", code.name);
            let n_insts = code.code.len();
            for i in 0..n_insts {
                let ref line = code.code[i];
                println!("#{}\t{}", i, line.code);
            }
        } else {
            println!("no current code");
        }
        
        println!("");
    }
    
    fn start_block(&mut self, block_name: MuName) {
        let label = format!("{}:", self.asm_block_label(block_name.clone()));        
        self.add_asm_block_label(label, block_name.clone());
        self.cur_mut().blocks.push(block_name.clone());
        
        let start = self.line();
        self.cur_mut().block_start.insert(block_name, start);
    }
    
    fn start_exception_block(&mut self, block_name: MuName) -> ValueLocation {
        let block = self.asm_block_label(block_name.clone());
        self.add_asm_symbolic(directive_globl(symbol(block.clone())));
        self.add_asm_symbolic(format!("{}:", symbol(block.clone())));
        
        self.start_block(block_name);
        
        ValueLocation::Relocatable(RegGroup::GPR, block)
    }
    
    fn end_block(&mut self, block_name: MuName) {
        let start : usize = *self.cur().block_start.get(&block_name).unwrap();
        let end : usize = self.line();
        
        self.cur_mut().block_range.insert(block_name, (start..end));
    }
    
    fn set_block_livein(&mut self, block_name: MuName, live_in: &Vec<P<Value>>) {
        let cur = self.cur_mut();
        
        let mut res = {
            if !cur.block_livein.contains_key(&block_name) {
                cur.block_livein.insert(block_name.clone(), vec![]);
            } else {
                panic!("seems we are inserting livein to block {} twice", block_name);
            }
            
            cur.block_livein.get_mut(&block_name).unwrap()
        };
        
        for value in live_in {
            res.push(value.extract_ssa_id().unwrap());
        }
    }
    
    fn set_block_liveout(&mut self, block_name: MuName, live_out: &Vec<P<Value>>) {
        let cur = self.cur_mut();
        
        let mut res = {
            if !cur.block_liveout.contains_key(&block_name) {
                cur.block_liveout.insert(block_name.clone(), vec![]);
            } else {
                panic!("seems we are inserting liveout to block {} twice", block_name);
            }
            
            cur.block_liveout.get_mut(&block_name).unwrap()
        };
        
        for value in live_out {
            match value.extract_ssa_id() {
                Some(id) => res.push(id),
                None => {}
            }
        }        
    }
    
    fn emit_nop(&mut self, bytes: usize) {
        trace!("emit: nop ({} bytes)", bytes);
        
        let asm = String::from("nop");
        
        self.add_asm_inst(
            asm,
            vec![],
            vec![],
            vec![],
            vec![],
            false
        );
    }
    
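    // In the emit_* methods below, the numeric argument given to prepare_reg/
    // prepare_mem is the column where that operand starts in the final
    // instruction text (e.g. 4 + 1 skips a four-letter mnemonic like "cmpq"
    // plus the following space); these columns are what replace_reg patches.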
    fn emit_cmp_r64_r64(&mut self, op1: &P<Value>, op2: &P<Value>) {
        trace!("emit: cmp {} {}", op1, op2);
        
        let (reg1, id1, loc1) = self.prepare_reg(op1, 4 + 1);
        let (reg2, id2, loc2) = self.prepare_reg(op2, 4 + 1 + reg1.len() + 1);
        
        let asm = format!("cmpq {},{}", reg1, reg2);
        
        self.add_asm_inst(
            asm,
            vec![],
            vec![],
            vec![id1, id2],
            vec![loc1, loc2],
            false
        );
    }
    
    fn emit_cmp_r64_imm32(&mut self, op1: &P<Value>, op2: i32) {
        trace!("emit: cmp {} {}", op1, op2);
        
        let (reg1, id1, loc1) = self.prepare_reg(op1, 4 + 1 + 1 + op2.to_string().len() + 1);
        
        let asm = format!("cmpq ${},{}", op2, reg1);
        
        self.add_asm_inst(
            asm,
            vec![],
            vec![],
            vec![id1],
            vec![loc1],
            false
        )
    }
    
    fn emit_cmp_r64_mem64(&mut self, op1: &P<Value>, op2: &P<Value>) {
        trace!("emit: cmp {} {}", op1, op2);
        
        let (reg, id1, loc1) = self.prepare_reg(op1, 4 + 1);
        let (mem, mut id2, mut loc2) = self.prepare_mem(op2, 4 + 1 + reg.len() + 1);
        
        let asm = format!("cmpq {},{}", reg, mem);
        
        // merge use vec
        id2.push(id1);
        loc2.push(loc1);
        
        self.add_asm_inst(
            asm,
            vec![],
            vec![],
            id2,
            loc2,
            true
        )
    }
    
    fn emit_mov_r64_imm32(&mut self, dest: &P<Value>, src: i32) {
        trace!("emit: mov {} -> {}", src, dest);
        
        let (reg1, id1, loc1) = self.prepare_reg(dest, 4 + 1 + 1 + src.to_string().len() + 1);
        
        let asm = format!("movq ${},{}", src, reg1);
        
        self.add_asm_inst(
            asm,
            vec![id1],
            vec![loc1],
            vec![],
            vec![],
            false
        )
    }
    
    // load
    fn emit_mov_r64_mem64(&mut self, dest: &P<Value>, src: &P<Value>) {
        trace!("emit: mov {} -> {}", src, dest);
        
        let (mem, id1, loc1) = self.prepare_mem(src, 4 + 1);
        let (reg, id2, loc2) = self.prepare_reg(dest, 4 + 1 + mem.len() + 1);
        
        let asm = format!("movq {},{}", mem, reg);
        
        self.add_asm_inst(
            asm,
            vec![id2],
            vec![loc2],
            id1,
            loc1,
            true
        )
    }
    
    // store
    fn emit_mov_mem64_r64(&mut self, dest: &P<Value>, src: &P<Value>) {
        trace!("emit: mov {} -> {}", src, dest);
        
        let (reg, id1, loc1) = self.prepare_reg(src, 4 + 1);
        let (mem, mut id2, mut loc2) = self.prepare_mem(dest, 4 + 1 + reg.len() + 1);
        
        // the register we used for the memory location is counted as 'use'
        // use the vec from mem as 'use' (push use reg from src to it)
        id2.push(id1);
        loc2.push(loc1);
        
        let asm = format!("movq {},{}", reg, mem);
        
        self.add_asm_inst(
            asm,
            vec![], // not defining anything (write to memory)
            vec![],
            id2,
            loc2,
            true
        )
    }
    
    fn emit_mov_mem64_imm32(&mut self, dest: &P<Value>, src: i32) {
        trace!("emit: mov {} -> {}", src, dest);
        
        let (mem, id, loc) = self.prepare_mem(dest, 4 + 1 + 1 + src.to_string().len() + 1);
        
        let asm = format!("movq ${},{}", src, mem);
        
        self.add_asm_inst(
            asm,
            vec![],
            vec![],
            id,
            loc,
            true
        )
    }
    
    fn emit_mov_r64_r64(&mut self, dest: &P<Value>, src: &P<Value>) {
        trace!("emit: mov {} -> {}", src, dest);
        
        let (reg1, id1, loc1) = self.prepare_reg(src, 4 + 1);
        let (reg2, id2, loc2) = self.prepare_reg(dest, 4 + 1 + reg1.len() + 1);
        
        let asm = format!("movq {},{}", reg1, reg2);
        
        self.add_asm_inst(
            asm,
            vec![id2],
            vec![loc2],
            vec![id1],
            vec![loc1],
            false
        )
    }
    
    fn emit_add_r64_r64(&mut self, dest: &P<Value>, src: &P<Value>) {
        trace!("emit: add {}, {} -> {}", dest, src, dest);
        
        let (reg1, id1, loc1) = self.prepare_reg(src, 4 + 1);
        let (reg2, id2, loc2) = self.prepare_reg(dest, 4 + 1 + reg1.len() + 1);
        
        let asm = format!("addq {},{}", reg1, reg2);
        
        self.add_asm_inst(
            asm,
            vec![id2],
            vec![loc2.clone()],
            vec![id1, id2],
            vec![loc1, loc2],
            false
        )
    }
    
    fn emit_lea_r64(&mut self, dest: &P<Value>, src: &P<Value>) {
        trace!("emit: lea {} -> {}", src, dest);
        
        let (mem, id1, loc1) = self.prepare_mem(src, 4 + 1);
        let (reg, id2, loc2) = self.prepare_reg(dest, 4 + 1 + mem.len() + 1);
        
        let asm = format!("leaq {},{}", mem, reg);
        
        self.add_asm_inst(
            asm,
            vec![id2],
            vec![loc2],
            id1,
            loc1,
            true
        ) 
    }
    
    fn emit_and_r64_imm32(&mut self, dest: &P<Value>, src: i32) {
        trace!("emit: and {}, {} -> {}", src, dest, dest);
        
        let (reg1, id1, loc1) = self.prepare_reg(dest, 4 + 1 + 1 + src.to_string().len() + 1);

        let asm = format!("andq ${},{}", src, reg1);
        
        self.add_asm_inst(
            asm,
            vec![id1],
            vec![loc1.clone()],
            vec![id1],
            vec![loc1],
            false
        )
    }
    
    fn emit_and_r64_r64(&mut self, dest: &P<Value>, src: &P<Value>) {
        trace!("emit: and {}, {} -> {}", src, dest, dest);
        
        let (reg1, id1, loc1) = self.prepare_reg(src, 4 + 1);
        let (reg2, id2, loc2) = self.prepare_reg(dest, 4 + 1 + reg1.len() + 1); 

        let asm = format!("andq {},{}", reg1, reg2);
        
        self.add_asm_inst(
            asm,
            vec![id2],
            vec![loc2.clone()],
            vec![id1, id2],
            vec![loc1, loc2],
            false
        )
    }    
    
    fn emit_add_r64_mem64(&mut self, dest: &P<Value>, src: &P<Value>) {
        trace!("emit: add {}, {} -> {}", dest, src, dest);
        unimplemented!()
    }
    
    fn emit_add_r64_imm32(&mut self, dest: &P<Value>, src: i32) {
        trace!("emit: add {}, {} -> {}", dest, src, dest);
        
        let (reg1, id1, loc1) = self.prepare_reg(dest, 4 + 1 + 1 + src.to_string().len() + 1);
        
        let asm = format!("addq ${},{}", src, reg1);
        
        self.add_asm_inst(
            asm,
            vec![id1],
            vec![loc1.clone()],
            vec![id1],
            vec![loc1],
            false
        )
    }
    
    fn emit_sub_r64_r64(&mut self, dest: &P<Value>, src: &P<Value>) {
        trace!("emit: sub {}, {} -> {}", dest, src, dest);
        
        let (reg1, id1, loc1) = self.prepare_reg(src, 4 + 1);
        let (reg2, id2, loc2) = self.prepare_reg(dest, 4 + 1 + reg1.len() + 1);
        
        let asm = format!("subq {},{}", reg1, reg2);
        
        self.add_asm_inst(
            asm,
            vec![id2],
            vec![loc2.clone()],
            vec![id1, id2],
            vec![loc1, loc2],
            false
        )
    }
    
    fn emit_sub_r64_mem64(&mut self, dest: &P<Value>, src: &P<Value>) {
        trace!("emit: sub {}, {} -> {}", dest, src, dest);
        unimplemented!()
    }
    
    fn emit_sub_r64_imm32(&mut self, dest: &P<Value>, src: i32) {
        trace!("emit: sub {}, {} -> {}", dest, src, dest);
        
        let (reg1, id1, loc1) = self.prepare_reg(dest, 4 + 1 + 1 + src.to_string().len() + 1);
        
        let asm = format!("subq ${},{}", src, reg1);
        
        self.add_asm_inst(
            asm,
            vec![id1],
            vec![loc1.clone()],
            vec![id1],
            vec![loc1],
            false
        )
    }
    
    fn emit_mul_r64(&mut self, src: &P<Value>) {
        trace!("emit: mul rax, {} -> (rdx, rax)", src);
        
        let (reg, id, loc) = self.prepare_reg(src, 3 + 1);
        let rax = self.prepare_machine_reg(&x86_64::RAX);
        let rdx = self.prepare_machine_reg(&x86_64::RDX);
        
        let asm = format!("mul {}", reg);
        
        self.add_asm_inst(
            asm,
            vec![rax, rdx],
            vec![],
            vec![id, rax],
            vec![loc],
            false
        )
    }
    
    fn emit_mul_mem64(&mut self, src: &P<Value>) {
        trace!("emit: mul rax, {} -> rax", src);
        unimplemented!()
    }
    
    fn emit_jmp(&mut self, dest_name: MuName) {
        trace!("emit: jmp {}", dest_name);
        
        // symbolic label, we don't need to patch it
        let asm = format!("jmp {}", self.asm_block_label(dest_name.clone()));
        self.add_asm_branch(asm, dest_name)
    }
    
    fn emit_je(&mut self, dest_name: MuName) {
        trace!("emit: je {}", dest_name);
        
        let asm = format!("je {}", self.asm_block_label(dest_name.clone()));
        self.add_asm_branch2(asm, dest_name);
    }
    
    fn emit_jne(&mut self, dest_name: MuName) {
        trace!("emit: jne {}", dest_name);
        
        let asm = format!("jne {}", self.asm_block_label(dest_name.clone()));
        self.add_asm_branch2(asm, dest_name);
    }
    
    fn emit_ja(&mut self, dest_name: MuName) {
        trace!("emit: ja {}", dest_name);
        
        let asm = format!("ja {}", self.asm_block_label(dest_name.clone()));
        self.add_asm_branch2(asm, dest_name);
    }
    
    fn emit_jae(&mut self, dest_name: MuName) {
        trace!("emit: jae {}", dest_name);
        
        let asm = format!("jae {}", self.asm_block_label(dest_name.clone()));
        self.add_asm_branch2(asm, dest_name);
    }
    
    fn emit_jb(&mut self, dest_name: MuName) {
        trace!("emit: jb {}", dest_name);
        
        let asm = format!("jb {}", self.asm_block_label(dest_name.clone()));
        self.add_asm_branch2(asm, dest_name);
    }
    
    fn emit_jbe(&mut self, dest_name: MuName) {
        trace!("emit: jbe {}", dest_name);
        
        let asm = format!("jbe {}", self.asm_block_label(dest_name.clone()));
        self.add_asm_branch2(asm, dest_name);
    }
    
    fn emit_jg(&mut self, dest_name: MuName) {
        trace!("emit: jg {}", dest_name);
        
        let asm = format!("jg {}", self.asm_block_label(dest_name.clone()));
        self.add_asm_branch2(asm, dest_name);
    }
    
    fn emit_jge(&mut self