// asm_backend.rs
#![allow(unused_variables)]

use compiler::backend::x86_64;
use compiler::backend::x86_64::CodeGenerator;
use vm::machine_code::MachineCode;

use ast::ptr::P;
use ast::ir::*;
use ast::inst::*;

use std::collections::HashMap;
use std::str;
use std::usize;

/// Machine code for one Mu function: the emitted assembly lines, the locations
/// of register defines/uses within them, and per-instruction control flow.
struct ASMCode {
    name: MuTag,
    code: Vec<ASM>,
    reg_defines: HashMap<MuID, Vec<ASMLocation>>,
    reg_uses: HashMap<MuID, Vec<ASMLocation>>,
    
    preds: Vec<Vec<usize>>,
    succs: Vec<Vec<usize>>,
    
    idx_to_blk: HashMap<usize, MuTag>,
    blk_to_idx: HashMap<MuTag, usize>,
    cond_branches: HashMap<usize, MuTag>,
    branches: HashMap<usize, MuTag>
}

impl MachineCode for ASMCode {
    fn number_of_insts(&self) -> usize {
        self.code.len()
    }
    
    fn is_move(&self, index: usize) -> bool {
        let inst = self.code.get(index);
        match inst {
            Some(inst) => inst.code.starts_with("mov"),
            None => false
        }
    }
    
    fn get_inst_reg_uses(&self, index: usize) -> &Vec<MuID> {
        &self.code[index].uses
    }
    
    fn get_inst_reg_defines(&self, index: usize) -> &Vec<MuID> {
        &self.code[index].defines
    }
    
    fn print(&self) {
        println!("");

        println!("code for {}: ", self.name);
        let n_insts = self.code.len();
        for i in 0..n_insts {
            let ref line = self.code[i];
            println!("#{}\t{}\t\tpred: {:?}, succ: {:?}", i, line.code, self.preds[i], self.succs[i]);
        }
        
        println!("");
    }
}

struct ASM {
    code: String,
    defines: Vec<MuID>,
    uses: Vec<MuID>
}

impl ASM {
    fn symbolic(line: String) -> ASM {
        ASM {
            code: line,
            defines: vec![],
            uses: vec![]
        }
    }
    
    fn inst(inst: String, defines: Vec<MuID>, uses: Vec<MuID>) -> ASM {
        ASM {
            code: inst,
            defines: defines,
            uses: uses
        }
    }
    
    fn branch(line: String) -> ASM {
        ASM {
            code: line,
            defines: vec![],
            uses: vec![]
        }
    }
    
    fn call(line: String) -> ASM {
        ASM {
            code: line,
            defines: vec![],
            uses: vec![]
        }
    }
    
    fn ret(line: String) -> ASM {
        ASM {
            code: line,
            defines: vec![],
            uses: vec![]
        }
    }
}

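/// Position of a register operand within an emitted instruction: the
/// instruction index (`line`, filled in when the instruction is pushed),
/// plus the byte offset and length of the operand text, so the operand can
/// later be rewritten in place (see `ASMCodeGen::replace`).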
#[derive(Clone, Debug)]
struct ASMLocation {
    line: usize,
    index: usize,
    len: usize
}

impl ASMLocation {
    /// the 'line' field will be updated later
    fn new(index: usize, len: usize) -> ASMLocation {
        ASMLocation{
            line: usize::MAX,
            index: index,
            len: len
        }
    }
}

pub struct ASMCodeGen {
    cur: Option<Box<ASMCode>>
}

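// Virtual registers are emitted as a fixed-width blank placeholder ("%   ")
// so that real register names can later be written over them in place
// without shifting the rest of the line.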
const REG_PLACEHOLDER_LEN : usize = 3;
lazy_static! {
    pub static ref REG_PLACEHOLDER : String = {
        let blank_spaces = [' ' as u8; REG_PLACEHOLDER_LEN];
        
        format!("%{}", str::from_utf8(&blank_spaces).unwrap())
    };
}

impl ASMCodeGen {
    pub fn new() -> ASMCodeGen {
        ASMCodeGen {
            cur: None
        }
    }
    
    fn cur(&self) -> &ASMCode {
        self.cur.as_ref().unwrap()
    }
    
    fn cur_mut(&mut self) -> &mut ASMCode {
        self.cur.as_mut().unwrap()
    }
    
    fn line(&self) -> usize {
        self.cur().code.len()
    }
    
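    // Overwrites `replace_len` bytes of `s` starting at `index` with `replace`,
    // padding with spaces if `replace` is shorter (e.g. when patching the
    // fixed-width register placeholders).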
    fn replace(s: &mut String, index: usize, replace: &str, replace_len: usize) {
        let vec = unsafe {s.as_mut_vec()};
        
        for i in 0..replace_len {
            if i < replace.len() {
                vec[index + i] = replace.as_bytes()[i] as u8;
            } else {
                vec[index + i] = ' ' as u8;
            }
        }
    }
    
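    // Emits a block label line and records the block <-> line-number mapping
    // used later by control_flow_analysis().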
    fn add_asm_block_label(&mut self, code: String, block_name: &'static str) {
        let l = self.line();
        self.cur_mut().code.push(ASM::symbolic(code));
        
        self.cur_mut().idx_to_blk.insert(l, block_name);
        self.cur_mut().blk_to_idx.insert(block_name, l);
    }
    
    fn add_asm_symbolic(&mut self, code: String){
        self.cur_mut().code.push(ASM::symbolic(code));
    }
    
    fn add_asm_call(&mut self, code: String) {
        self.cur_mut().code.push(ASM::call(code));
    }
    
    fn add_asm_ret(&mut self, code: String) {
        self.cur_mut().code.push(ASM::ret(code));
    }
    
    fn add_asm_branch(&mut self, code: String, target: &'static str) {
        let l = self.line();
        self.cur_mut().code.push(ASM::branch(code));
        
        self.cur_mut().branches.insert(l, target);
    }
    
    fn add_asm_branch2(&mut self, code: String, target: &'static str) {
        let l = self.line();
        self.cur_mut().code.push(ASM::branch(code));
        
        self.cur_mut().cond_branches.insert(l, target);
    }
    
    fn add_asm_inst(
        &mut self, 
        code: String, 
        defines: Vec<MuID>,
        mut define_locs: Vec<ASMLocation>,
        uses: Vec<MuID>,
        mut use_locs: Vec<ASMLocation>)
    {
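        // Record, for each register operand, the line and byte range its text
        // occupies so that placeholders can be rewritten in place later.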
        let line = self.line();
        
        trace!("asm: {}", code);
        trace!("     defines: {:?}, def_locs: {:?}", defines, define_locs);
        trace!("     uses: {:?}, use_locs: {:?}", uses, use_locs);
        let mc = self.cur_mut();
       
        // add locations of defined registers
        for i in 0..define_locs.len() {
            let id = defines[i];
            
            // update line in location
            let ref mut loc = define_locs[i];
            loc.line = line;
            
            if mc.reg_defines.contains_key(&id) {
                mc.reg_defines.get_mut(&id).unwrap().push(loc.clone());
            } else {
                mc.reg_defines.insert(id, vec![loc.clone()]);
            }
        }
       
        // add locations of used registers
        for i in 0..use_locs.len() {
            let id = uses[i];
            
            // update line in location
            let ref mut loc = use_locs[i];
            loc.line = line;
            
            if mc.reg_uses.contains_key(&id) {
                mc.reg_uses.get_mut(&id).unwrap().push(loc.clone());
            } else {
                mc.reg_uses.insert(id, vec![loc.clone()]);
            }
        }
        
        // put the instruction
        mc.code.push(ASM::inst(code, defines, uses));
    }
    
    fn define_reg(&mut self, reg: &P<Value>, loc: ASMLocation) {
        let id = reg.extract_ssa_id().unwrap();
        
        let code = self.cur_mut();
        if code.reg_defines.contains_key(&id) {
            let regs = code.reg_defines.get_mut(&id).unwrap();
            regs.push(loc);
        } else {
            code.reg_defines.insert(id, vec![loc]);
        } 
    }
    
    fn use_reg(&mut self, reg: &P<Value>, loc: ASMLocation) {
        let id = reg.extract_ssa_id().unwrap();
        
        let code = self.cur_mut();
        if code.reg_uses.contains_key(&id) {
            let reg_uses = code.reg_uses.get_mut(&id).unwrap();
            reg_uses.push(loc);
        } else {
            code.reg_uses.insert(id, vec![loc]);
        } 
    }
    
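    // Returns the operand text for `op`, its SSA id, and an ASMLocation whose
    // `index` is the byte offset `loc` of the operand within the instruction.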
    fn prepare_op(&self, op: &P<Value>, loc: usize) -> (String, MuID, ASMLocation) {
        let str = self.asm_reg_op(op);
        let len = str.len();
        (str, op.extract_ssa_id().unwrap(), ASMLocation::new(loc, len)) 
    }
    
    fn prepare_machine_reg(&self, op: &P<Value>) -> MuID {
        op.extract_ssa_id().unwrap()
    }
    
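    // Machine registers are emitted by name; virtual registers get the
    // fixed-width placeholder so they can be patched after register allocation.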
    fn asm_reg_op(&self, op: &P<Value>) -> String {
        let id = op.extract_ssa_id().unwrap();
        if id < RESERVED_NODE_IDS_FOR_MACHINE {
            // machine reg
            format!("%{}", op.tag)
        } else {
            // virtual register, use place holder
            REG_PLACEHOLDER.clone()
        }
    }
    
    fn asm_block_label(&self, label: MuTag) -> String {
        format!("{}_{}", self.cur().name, label)
    }
    
    /// Builds per-instruction predecessor/successor lists from the recorded
    /// block labels, branch targets and conditional-branch targets.
    fn control_flow_analysis(&mut self) {
        let n_insts = self.line();
        
        let code = self.cur_mut();
        code.preds = vec![vec![]; n_insts];
        code.succs = vec![vec![]; n_insts];
        
        for i in 0..n_insts {
            // determine predecessor - if cur is not block start, its predecessor is previous insts
            let is_block_start = code.idx_to_blk.get(&i);
            if is_block_start.is_none() {
                if i > 0 {
                    code.preds[i].push(i - 1);
                }
            } else {
                // if cur is a branch target, we already set its predecessor
                // if cur is a fall-through block, we set it in a sanity check pass
            }
            
            // determine successor
            let is_branch = code.branches.get(&i);
            if is_branch.is_some() {
                // branch to target
                let target = is_branch.unwrap();
                let target_n = code.blk_to_idx.get(target).unwrap();
                
                // cur inst's succ is target
                code.succs[i].push(*target_n);
                
                // target's pred is cur
                code.preds[*target_n].push(i);
            } else {
                let is_cond_branch = code.cond_branches.get(&i);
                if is_cond_branch.is_some() {
                    // branch to target
                    let target = is_cond_branch.unwrap();
                    let target_n = code.blk_to_idx.get(target).unwrap();
                    
                    // cur insts' succ is target and next inst
                    code.succs[i].push(*target_n);
                    if i < n_insts - 1 {
                        code.succs[i].push(i + 1);
                    }
                    
                    // target's pred is cur
                    code.preds[*target_n].push(i);
                } else {
                    // not branch nor cond branch, succ is next inst
                    if i < n_insts - 1 {
                        code.succs[i].push(i + 1);
                    }
                }
            } 
        }
        
        // a sanity check for fallthrough blocks
        for i in 0..n_insts {
            if i != 0 && code.preds[i].len() == 0 {
                code.preds[i].push(i - 1);
            }
        }        
    }
}

impl CodeGenerator for ASMCodeGen {
    fn start_code(&mut self, func_name: MuTag) {
        self.cur = Some(Box::new(ASMCode {
                name: func_name,
                code: vec![],
                reg_defines: HashMap::new(),
                reg_uses: HashMap::new(),
                
                preds: vec![],
                succs: vec![],
                
                idx_to_blk: HashMap::new(),
                blk_to_idx: HashMap::new(),
                cond_branches: HashMap::new(),
                branches: HashMap::new()
            }));
        
        self.add_asm_symbolic(format!(".globl {}", func_name));
    }
    
    fn finish_code(&mut self) -> Box<MachineCode> {
        self.control_flow_analysis();
        self.cur.take().unwrap()
    }
    
    fn print_cur_code(&self) {
        println!("");
        
        if self.cur.is_some() {
            let code = self.cur.as_ref().unwrap();
            
            println!("code for {}: ", code.name);
            let n_insts = code.code.len();
            for i in 0..n_insts {
                let ref line = code.code[i];
                println!("#{}\t{}", i, line.code);
            }
        } else {
            println!("no current code");
        }
        
        println!("");
    }
    
    fn start_block(&mut self, block_name: MuTag) {
        let label = format!("{}:", self.asm_block_label(block_name));
        self.add_asm_block_label(label, block_name);
    }
    
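    // The byte offsets passed to prepare_op() locate each operand within the
    // instruction text: mnemonic length + 1 for the space for the first
    // operand, then that operand's length + 2 for ", " for the next one.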
    fn emit_cmp_r64_r64(&mut self, op1: &P<Value>, op2: &P<Value>) {
        trace!("emit: cmp {} {}", op1, op2);
        
        let (reg1, id1, loc1) = self.prepare_op(op1, 4 + 1);
        let (reg2, id2, loc2) = self.prepare_op(op2, 4 + 1 + reg1.len() + 2);
        
        let asm = format!("cmpq {}, {}", reg1, reg2);
        
        self.add_asm_inst(
            asm,
            vec![],
            vec![],
            vec![id1, id2],
            vec![loc1, loc2]
        );
    }
    
    fn emit_cmp_r64_imm32(&mut self, op1: &P<Value>, op2: u32) {
        trace!("emit: cmp {} {}", op1, op2);
        
        // AT&T syntax: the immediate is the source operand and comes first
        let imm = format!("${}", op2);
        let (reg1, id1, loc1) = self.prepare_op(op1, 4 + 1 + imm.len() + 2);
        
        let asm = format!("cmpq {}, {}", imm, reg1);
        
        self.add_asm_inst(
            asm,
            vec![],
            vec![],
            vec![id1],
            vec![loc1]
        )
    }
    
    fn emit_cmp_r64_mem64(&mut self, op1: &P<Value>, op2: &P<Value>) {
        trace!("emit: cmp {} {}", op1, op2);
        unimplemented!()
    }
    
    fn emit_mov_r64_imm32(&mut self, dest: &P<Value>, src: u32) {
        trace!("emit: mov {} -> {}", src, dest);
        
        // AT&T syntax: the immediate is the source operand and comes first;
        // the destination register is defined, not used
        let imm = format!("${}", src);
        let (reg1, id1, loc1) = self.prepare_op(dest, 4 + 1 + imm.len() + 2);
        
        let asm = format!("movq {}, {}", imm, reg1);
        
        self.add_asm_inst(
            asm,
            vec![id1],
            vec![loc1],
            vec![],
            vec![]
        )
    }
    
    fn emit_mov_r64_mem64(&mut self, dest: &P<Value>, src: &P<Value>) {
        trace!("emit: mov {} -> {}", src, dest);
        unimplemented!()
    }
    
    fn emit_mov_r64_r64(&mut self, dest: &P<Value>, src: &P<Value>) {
        trace!("emit: mov {} -> {}", src, dest);
        
        // AT&T syntax: source first, then destination
        let (reg2, id2, loc2) = self.prepare_op(src, 4 + 1);
        let (reg1, id1, loc1) = self.prepare_op(dest, 4 + 1 + reg2.len() + 2);
        
        let asm = format!("movq {}, {}", reg2, reg1);
        
        self.add_asm_inst(
            asm,
            vec![id1],
            vec![loc1],
            vec![id2],
            vec![loc2]
        )
    }
    
    fn emit_add_r64_r64(&mut self, dest: &P<Value>, src: &P<Value>) {
        trace!("emit: add {}, {} -> {}", dest, src, dest);
        
        // AT&T syntax: source first, then destination
        let (reg2, id2, loc2) = self.prepare_op(src, 4 + 1);
        let (reg1, id1, loc1) = self.prepare_op(dest, 4 + 1 + reg2.len() + 2);
        
        let asm = format!("addq {}, {}", reg2, reg1);
        
        self.add_asm_inst(
            asm,
            vec![id1],
            vec![loc1.clone()],
            vec![id1, id2],
            vec![loc1, loc2]
        )
    }
    
    fn emit_add_r64_mem64(&mut self, dest: &P<Value>, src: &P<Value>) {
        trace!("emit: add {}, {} -> {}", dest, src, dest);
        unimplemented!()
    }
    
    fn emit_add_r64_imm32(&mut self, dest: &P<Value>, src: u32) {
        trace!("emit: add {}, {} -> {}", dest, src, dest);
        
        // AT&T syntax: the immediate is the source operand and comes first
        let imm = format!("${}", src);
        let (reg1, id1, loc1) = self.prepare_op(dest, 4 + 1 + imm.len() + 2);
        
        let asm = format!("addq {}, {}", imm, reg1);
        
        self.add_asm_inst(
            asm,
            vec![id1],
            vec![loc1.clone()],
            vec![id1],
            vec![loc1]
        )
    }
    
    fn emit_sub_r64_r64(&mut self, dest: &P<Value>, src: &P<Value>) {
        trace!("emit: sub {}, {} -> {}", dest, src, dest);
        
        // AT&T syntax: source first, then destination
        let (reg2, id2, loc2) = self.prepare_op(src, 4 + 1);
        let (reg1, id1, loc1) = self.prepare_op(dest, 4 + 1 + reg2.len() + 2);
        
        let asm = format!("subq {}, {}", reg2, reg1);
        
        self.add_asm_inst(
            asm,
            vec![id1],
            vec![loc1.clone()],
            vec![id1, id2],
            vec![loc1, loc2]
        )
    }
    
    fn emit_sub_r64_mem64(&mut self, dest: &P<Value>, src: &P<Value>) {
        trace!("emit: sub {}, {} -> {}", dest, src, dest);
        unimplemented!()
    }
    
    fn emit_sub_r64_imm32(&mut self, dest: &P<Value>, src: u32) {
        trace!("emit: sub {}, {} -> {}", dest, src, dest);
        
        // AT&T syntax: the immediate is the source operand and comes first
        let imm = format!("${}", src);
        let (reg1, id1, loc1) = self.prepare_op(dest, 4 + 1 + imm.len() + 2);
        
        let asm = format!("subq {}, {}", imm, reg1);
        
        self.add_asm_inst(
            asm,
            vec![id1],
            vec![loc1.clone()],
            vec![id1],
            vec![loc1]
        )
    }
    
    fn emit_mul_r64(&mut self, src: &P<Value>) {
        trace!("emit: mul rax, {} -> (rdx, rax)", src);
        
        let (reg, id, loc) = self.prepare_op(src, 3 + 1);
        let rax = self.prepare_machine_reg(&x86_64::RAX);
        let rdx = self.prepare_machine_reg(&x86_64::RDX);
        
        let asm = format!("mul {}", reg);
        
        self.add_asm_inst(
            asm,
            vec![rax, rdx],
            vec![],
            vec![id, rax],
            vec![loc]
        )
    }
    
    fn emit_mul_mem64(&mut self, src: &P<Value>) {
        trace!("emit: mul rax, {} -> rax", src);
        unimplemented!()
    }
    
    fn emit_jmp(&mut self, dest: &Destination) {
        trace!("emit: jmp {}", dest.target);
        
        // symbolic label, we don't need to patch it
        let asm = format!("jmp {}", self.asm_block_label(dest.target));
        self.add_asm_branch(asm, dest.target)
    }
    
    fn emit_je(&mut self, dest: &Destination) {
        trace!("emit: je {}", dest.target);
        
        let asm = format!("je {}", self.asm_block_label(dest.target));
        self.add_asm_branch2(asm, dest.target);
    }
    
    fn emit_jne(&mut self, dest: &Destination) {
        trace!("emit: jne {}", dest.target);
        
        let asm = format!("jne {}", self.asm_block_label(dest.target));
        self.add_asm_branch2(asm, dest.target);
    }
    
    fn emit_ja(&mut self, dest: &Destination) {
        trace!("emit: ja {}", dest.target);
        
        let asm = format!("ja {}", self.asm_block_label(dest.target));
        self.add_asm_branch2(asm, dest.target);
    }
    
    fn emit_jae(&mut self, dest: &Destination) {
        trace!("emit: jae {}", dest.target);
        
        let asm = format!("jae {}", self.asm_block_label(dest.target));
        self.add_asm_branch2(asm, dest.target);
    }
    
    fn emit_jb(&mut self, dest: &Destination) {
        trace!("emit: jb {}", dest.target);
        
        let asm = format!("jb {}", self.asm_block_label(dest.target));
        self.add_asm_branch2(asm, dest.target);
    }
    
    fn emit_jbe(&mut self, dest: &Destination) {
        trace!("emit: jbe {}", dest.target);
        
        let asm = format!("jbe {}", self.asm_block_label(dest.target));
        self.add_asm_branch2(asm, dest.target);
    }
    
    fn emit_jg(&mut self, dest: &Destination) {
        trace!("emit: jg {}", dest.target);
        
        let asm = format!("jg {}", self.asm_block_label(dest.target));
        self.add_asm_branch2(asm, dest.target);
    }
    
    fn emit_jge(&mut self, dest: &Destination) {
        trace!("emit: jge {}", dest.target);
        
        let asm = format!("jge {}", self.asm_block_label(dest.target));
        self.add_asm_branch2(asm, dest.target);
    }
    
    fn emit_jl(&mut self, dest: &Destination) {
        trace!("emit: jl {}", dest.target);
        
        let asm = format!("jl {}", self.asm_block_label(dest.target));
        self.add_asm_branch2(asm, dest.target);
    }
    
    fn emit_jle(&mut self, dest: &Destination) {
        trace!("emit: jle {}", dest.target);
        
        let asm = format!("jle {}", self.asm_block_label(dest.target));
        self.add_asm_branch2(asm, dest.target);
    }    
    
    fn emit_call_near_rel32(&mut self, func: MuTag) {
        trace!("emit: call {}", func);
        
        let asm = format!("call {}", func);
        self.add_asm_call(asm);
        
        // FIXME: call interferes with machine registers
    }
    
    fn emit_call_near_r64(&mut self, func: &P<Value>) {
        trace!("emit: call {}", func);
        unimplemented!()
    }
    
    fn emit_call_near_mem64(&mut self, func: &P<Value>) {
        trace!("emit: call {}", func);
        unimplemented!()
    }
    
    fn emit_ret(&mut self) {
        trace!("emit: ret");
        
        let asm = format!("ret");
        self.add_asm_ret(asm);
    }
    
    fn emit_push_r64(&mut self, src: &P<Value>) {
        trace!("emit: push {}", src);
        
        let (reg, id, loc) = self.prepare_op(src, 5 + 1);
        
        let asm = format!("pushq {}", reg);
        
        self.add_asm_inst(
            asm,
            vec![],
            vec![],
            vec![id],
            vec![loc]
        )
    }
    
    fn emit_pop_r64(&mut self, dest: &P<Value>) {
        trace!("emit: pop {}", dest);
        
        let (reg, id, loc) = self.prepare_op(dest, 4 + 1);
        
        let asm = format!("popq {}", reg);
        
        self.add_asm_inst(
            asm,
            vec![id],
            vec![loc.clone()],
            vec![id],
            vec![loc]
        )
    }    
}