asm_backend.rs 146 KB
Newer Older
1
// Copyright 2017 The Australian National University
2
//
3 4 5
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
6
//
7
//     http://www.apache.org/licenses/LICENSE-2.0
8
//
9 10 11 12 13 14
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

15 16
#![allow(unused_variables)]

17
use compiler::backend::AOT_EMIT_CONTEXT_FILE;
qinsoon's avatar
qinsoon committed
18
use compiler::backend::RegGroup;
qinsoon's avatar
qinsoon committed
19
use utils::ByteSize;
20 21
use utils::Address;
use utils::POINTER_SIZE;
22
use compiler::backend::x86_64;
23
use compiler::backend::x86_64::CodeGenerator;
24
use compiler::backend::{Reg, Mem};
25
use compiler::backend::x86_64::check_op_len;
qinsoon's avatar
qinsoon committed
26
use compiler::machine_code::MachineCode;
qinsoon's avatar
qinsoon committed
27
use vm::VM;
qinsoon's avatar
qinsoon committed
28
use runtime::ValueLocation;
29

qinsoon's avatar
qinsoon committed
30
use utils::vec_utils;
31
use utils::string_utils;
32 33
use utils::LinkedHashMap;

34 35
use ast::ptr::P;
use ast::ir::*;
36
use ast::types;
37

38
use std::str;
39
use std::usize;
40
use std::slice::Iter;
41
use std::ops;
42
use std::collections::HashSet;
43
use std::sync::{RwLock, Arc};
qinsoon's avatar
qinsoon committed
44
use std::any::Any;
45

qinsoon's avatar
qinsoon committed
46 47 48 49 50 51 52
/// ASMCode represents a segment of assembly machine code. Usually it is machine code for
/// a Mu function, but it could simply be a sequence of machine code.
/// This data structure implements MachineCode trait which allows compilation passes to
/// operate on the machine code in a machine independent way.
/// This data structure is also designed in a way to support in-place code generation. Though
/// in-place code generation is mostly irrelevant for ahead-of-time compilation, I tried
/// test the idea with this AOT backend.
struct ASMCode {
    /// function name for the code
    name: MuName,
    /// a list of all the assembly instructions
    code: Vec<ASMInst>,
    /// entry block name
    entry: MuName,
    /// all the blocks, keyed by block name (insertion order is preserved)
    blocks: LinkedHashMap<MuName, ASMBlock>,
    /// the patch location for frame size growth/shrink
    /// we only know the exact frame size after register allocation, but we need to insert
    /// frame adjust code beforehand, so we insert adjust code with an empty frame size, and
    /// patch it later
    frame_size_patchpoints: Vec<ASMLocation>
}

69
// NOTE(review): these unsafe impls assert that ASMCode may be moved to and
// shared between threads. The fields visible here (String/Vec/LinkedHashMap
// of plain data) show no interior mutability, but confirm this invariant
// still holds whenever fields are added to ASMCode.
unsafe impl Send for ASMCode {}
unsafe impl Sync for ASMCode {}

qinsoon's avatar
qinsoon committed
72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94
/// ASMInst represents an assembly instruction.
/// This data structure contains enough information to implement MachineCode trait on ASMCode,
/// and it also supports in-place code generation.
#[derive(Clone, Debug)]
struct ASMInst {
    /// actual asm code
    code: String,
    /// defines of this instruction. a map from temporary/register ID to its location
    /// (where it appears in the code string)
    defines: LinkedHashMap<MuID, Vec<ASMLocation>>,
    /// uses of this instruction (same layout as `defines`)
    uses: LinkedHashMap<MuID, Vec<ASMLocation>>,
    /// is this instruction using memory operand?
    is_mem_op_used: bool,
    /// is this assembly code a symbol? (not an actual instruction)
    is_symbol: bool,
    /// is this instruction an inserted spill instruction (load from/store to memory)?
    spill_info: Option<SpillMemInfo>,
    /// predecessors of this instruction (indices into ASMCode::code)
    preds: Vec<usize>,
    /// successors of this instruction (indices into ASMCode::code)
    succs: Vec<usize>,
    /// branch target of this instruction
    branch: ASMBranchTarget
}

/// ASMLocation represents the location of a register/temporary in assembly code.
/// It contains enough information so that we can later patch the register.
#[derive(Clone, Debug, PartialEq, Eq)]
struct ASMLocation {
    /// which row it is in the assembly code vector
    line: usize,
    /// which column (character offset within the code string)
    index: usize,
    /// length of spaces reserved for the register/temporary
    len: usize,
    /// bit-length of the register/temporary
    oplen: usize
}

/// ASMBlock represents information about a basic block in assembly.
#[derive(Clone, Debug)]
struct ASMBlock {
    /// [start_inst, end_inst) (includes start_inst)
    start_inst: usize,
    /// [start_inst, end_inst) (excludes end_inst)
    end_inst: usize,
    /// livein reg/temp
    livein: Vec<MuID>,
    /// liveout reg/temp
    liveout: Vec<MuID>
}

/// ASMBranchTarget represents branching control flow of machine instructions.
#[derive(Clone, Debug)]
enum ASMBranchTarget {
    /// not a branching instruction
    None,
    /// a conditional branch to target
    Conditional(MuName),
    /// an unconditional branch to target
    Unconditional(MuName),
    /// this instruction may throw exception to target
    PotentiallyExcepting(MuName),
    /// this instruction is a return
    Return
}

/// SpillMemInfo represents inserted spilling instructions for loading/storing values
#[derive(Clone, Debug)]
enum SpillMemInfo {
    /// a spill load from the given memory location
    Load(P<Value>),
    /// a spill store to the given memory location
    Store(P<Value>),
    /// Callee saved record
    CalleeSaved
}

148
impl ASMCode {
qinsoon's avatar
qinsoon committed
149
    /// returns a vector of ASMLocation for all the uses of the given reg/temp
qinsoon's avatar
qinsoon committed
150 151 152 153 154 155 156
    fn get_use_locations(&self, reg: MuID) -> Vec<ASMLocation> {
        let mut ret = vec![];

        for inst in self.code.iter() {
            match inst.uses.get(&reg) {
                Some(ref locs) => {
                    ret.append(&mut locs.to_vec());
157
                }
qinsoon's avatar
qinsoon committed
158 159 160 161 162 163 164
                None => {}
            }
        }

        ret
    }

qinsoon's avatar
qinsoon committed
165
    /// returns a vector of ASMLocation for all the defines of the given reg/temp
qinsoon's avatar
qinsoon committed
166 167 168 169 170 171 172
    fn get_define_locations(&self, reg: MuID) -> Vec<ASMLocation> {
        let mut ret = vec![];

        for inst in self.code.iter() {
            match inst.defines.get(&reg) {
                Some(ref locs) => {
                    ret.append(&mut locs.to_vec());
173
                }
qinsoon's avatar
qinsoon committed
174 175 176 177 178 179 180
                None => {}
            }
        }

        ret
    }

qinsoon's avatar
qinsoon committed
181
    /// is the given instruction the starting instruction of a block?
qinsoon's avatar
qinsoon committed
182 183 184 185 186 187 188 189 190
    fn is_block_start(&self, inst: usize) -> bool {
        for block in self.blocks.values() {
            if block.start_inst == inst {
                return true;
            }
        }
        false
    }

qinsoon's avatar
qinsoon committed
191
    /// is the given instruction the ending instruction of a block?
192
    fn is_last_inst_in_block(&self, inst: usize) -> bool {
qinsoon's avatar
qinsoon committed
193 194 195 196 197 198 199 200
        for block in self.blocks.values() {
            if block.end_inst == inst + 1 {
                return true;
            }
        }
        false
    }

qinsoon's avatar
qinsoon committed
201
    /// finds block for a given instruction and returns the block
202
    fn get_block_by_inst(&self, inst: usize) -> (&MuName, &ASMBlock) {
qinsoon's avatar
qinsoon committed
203 204 205 206 207 208 209 210
        for (name, block) in self.blocks.iter() {
            if inst >= block.start_inst && inst < block.end_inst {
                return (name, block);
            }
        }
        panic!("didnt find any block for inst {}", inst)
    }

qinsoon's avatar
qinsoon committed
211 212
    /// finds block that starts with the given instruction
    /// returns None if we cannot find such block
qinsoon's avatar
qinsoon committed
213 214 215 216 217 218 219 220 221
    fn get_block_by_start_inst(&self, inst: usize) -> Option<&ASMBlock> {
        for block in self.blocks.values() {
            if block.start_inst == inst {
                return Some(block);
            }
        }
        None
    }

qinsoon's avatar
qinsoon committed
222 223 224 225 226 227
    /// rewrites code by inserting instructions in certain locations
    /// This function is used for inserting spilling instructions. It takes
    /// two hashmaps as arguments, the keys of which are line numbers where
    /// we should insert code, and the values are the code to be inserted.
    /// We need to carefully ensure the metadata for existing code is
    /// still correct after insertion. This function returns the resulting code.
    fn rewrite_insert(
        &self,
        insert_before: LinkedHashMap<usize, Vec<Box<ASMCode>>>,
        insert_after: LinkedHashMap<usize, Vec<Box<ASMCode>>>
    ) -> Box<ASMCode> {
        trace!("insert spilling code");
        // start from an empty code object; instructions/blocks are rebuilt below
        let mut ret = ASMCode {
            name: self.name.clone(),
            entry: self.entry.clone(),
            code: vec![],
            blocks: linked_hashmap!{},
            frame_size_patchpoints: vec![]
        };

        // how many instructions have been inserted
        let mut inst_offset = 0;
        // start index (in the new code) of the block currently being copied;
        // usize::MAX acts as a "no block open" sentinel
        let mut cur_block_start = usize::MAX;

        // inst N in old machine code is N' in new machine code
        // this map stores the relationship
        let mut location_map: LinkedHashMap<usize, usize> = LinkedHashMap::new();

        // iterate through old machine code
        for i in 0..self.number_of_insts() {
            trace!("Inst{}", i);

            if self.is_block_start(i) {
                cur_block_start = i + inst_offset;
                trace!("  block start is shifted to {}", cur_block_start);
            }

            // insert code before this instruction
            if insert_before.contains_key(&i) {
                for insert in insert_before.get(&i).unwrap() {
                    ret.append_code_sequence_all(insert);
                    inst_offset += insert.number_of_insts();
                    trace!("  inserted {} insts before", insert.number_of_insts());
                }
            }

            // copy this instruction
            let mut inst = self.code[i].clone();

            // old ith inst is now the (i + inst_offset)th instruction
            location_map.insert(i, i + inst_offset);
            trace!("  Inst{} is now Inst{}", i, i + inst_offset);

            // this instruction has been offset by several instructions('inst_offset')
            // update its info
            // 1. fix defines and uses
            for locs in inst.defines.values_mut() {
                for loc in locs {
                    debug_assert!(loc.line == i);
                    loc.line += inst_offset;
                }
            }
            for locs in inst.uses.values_mut() {
                for loc in locs {
                    debug_assert!(loc.line == i);
                    loc.line += inst_offset;
                }
            }
            // 2. we need to delete existing preds/succs - CFA is required later
            inst.preds.clear();
            inst.succs.clear();
            // 3. add the inst
            ret.code.push(inst);


            // insert code after this instruction
            if insert_after.contains_key(&i) {
                for insert in insert_after.get(&i).unwrap() {
                    ret.append_code_sequence_all(insert);
                    inst_offset += insert.number_of_insts();
                    trace!("  inserted {} insts after", insert.number_of_insts());
                }
            }

            // if we finish a block
            if self.is_last_inst_in_block(i) {
                let cur_block_end = i + 1 + inst_offset;

                // copy the block
                let (name, block) = self.get_block_by_inst(i);

                // livein/liveout are left empty: liveness must be recomputed
                // after the insertion anyway
                let new_block = ASMBlock {
                    start_inst: cur_block_start,
                    end_inst: cur_block_end,

                    livein: vec![],
                    liveout: vec![]
                };

                trace!("  old block: {:?}", block);
                trace!("  new block: {:?}", new_block);

                cur_block_start = usize::MAX;

                // add to the new code
                ret.blocks.insert(name.clone(), new_block);
            }
        }

        // fix patchpoints: retarget each patchpoint to the shifted line
        for patchpoint in self.frame_size_patchpoints.iter() {
            let new_patchpoint = ASMLocation {
                line: *location_map.get(&patchpoint.line).unwrap(),
                index: patchpoint.index,
                len: patchpoint.len,
                oplen: patchpoint.oplen
            };

            ret.frame_size_patchpoints.push(new_patchpoint);
        }

        // rebuild preds/succs that were cleared above
        ret.control_flow_analysis();

        Box::new(ret)
    }

qinsoon's avatar
qinsoon committed
348 349
    /// appends a given part of assembly code sequence at the end of current code
    /// During appending, we need to fix line number.
350
    fn append_code_sequence(&mut self, another: &Box<ASMCode>, start_inst: usize, n_insts: usize) {
qinsoon's avatar
qinsoon committed
351 352 353 354 355 356 357 358 359 360 361 362 363 364 365 366 367 368 369 370 371 372 373 374 375
        let base_line = self.number_of_insts();

        for i in 0..n_insts {
            let cur_line_in_self = base_line + i;
            let cur_line_from_copy = start_inst + i;
            let mut inst = another.code[cur_line_from_copy].clone();

            // fix info
            for locs in inst.defines.values_mut() {
                for loc in locs {
                    debug_assert!(loc.line == i);
                    loc.line = cur_line_in_self;
                }
            }
            for locs in inst.uses.values_mut() {
                for loc in locs {
                    debug_assert!(loc.line == i);
                    loc.line = cur_line_in_self;
                }
            }
            // ignore preds/succs

            // add to self
            self.code.push(inst);
        }
376 377
    }

qinsoon's avatar
qinsoon committed
378
    /// appends assembly sequence at the end of current code
379 380 381 382
    fn append_code_sequence_all(&mut self, another: &Box<ASMCode>) {
        let n_insts = another.number_of_insts();
        self.append_code_sequence(another, 0, n_insts)
    }
qinsoon's avatar
qinsoon committed
383

qinsoon's avatar
qinsoon committed
384 385
    /// control flow analysis on current code
    /// calculating branch targets, preds/succs for each instruction
    fn control_flow_analysis(&mut self) {
        // toggle for verbose tracing of the analysis
        const TRACE_CFA: bool = true;

        // control flow analysis
        let n_insts = self.number_of_insts();
        let ref mut asm = self.code;

        for i in 0..n_insts {
            trace_if!(TRACE_CFA, "---inst {}---", i);

            // skip symbol (labels/directives, not real instructions)
            if asm[i].is_symbol {
                continue;
            }

            // determine predecessor:
            // * if last instruction falls through to current instruction,
            //   the predecessor is last instruction
            // * otherwise, we set predecessor when we deal with the instruction
            //   that branches to current instruction
            if i != 0 {
                let last_inst = ASMCode::find_prev_inst(i, asm);
                match last_inst {
                    Some(last_inst) => {
                        let last_inst_branch = asm[last_inst].branch.clone();
                        match last_inst_branch {
                            // if it is a fallthrough, we set its preds as last inst
                            ASMBranchTarget::None => {
                                if !asm[i].preds.contains(&last_inst) {
                                    asm[i].preds.push(last_inst);
                                    trace_if!(
                                        TRACE_CFA,
                                        "inst {}: set PREDS as previous inst - fallthrough {}",
                                        i,
                                        last_inst
                                    );
                                }
                            }
                            // otherwise do nothing
                            _ => {}
                        }
                    }
                    None => {}
                }
            }

            // determine successor
            // make a clone so that we are not borrowing anything
            let branch = asm[i].branch.clone();
            match branch {
                ASMBranchTarget::Unconditional(ref target) => {
                    // branch-to target
                    let target_n = self.blocks.get(target).unwrap().start_inst;

                    // cur inst's succ is target
                    asm[i].succs.push(target_n);

                    // target's pred is cur
                    asm[target_n].preds.push(i);

                    trace_if!(TRACE_CFA, "inst {}: is a branch to {}", i, target);
                    trace_if!(TRACE_CFA, "inst {}: branch target index is {}", i, target_n);
                    trace_if!(
                        TRACE_CFA,
                        "inst {}: set SUCCS as branch target {}",
                        i,
                        target_n
                    );
                    trace_if!(
                        TRACE_CFA,
                        "inst {}: set PREDS as branch source {}",
                        target_n,
                        i
                    );
                }
                ASMBranchTarget::Conditional(ref target) => {
                    // branch-to target
                    let target_n = self.blocks.get(target).unwrap().start_inst;

                    // cur insts' succ is target
                    asm[i].succs.push(target_n);

                    trace_if!(TRACE_CFA, "inst {}: is a cond branch to {}", i, target);
                    trace_if!(TRACE_CFA, "inst {}: branch target index is {}", i, target_n);
                    trace_if!(
                        TRACE_CFA,
                        "inst {}: set SUCCS as branch target {}",
                        i,
                        target_n
                    );

                    // target's pred is cur
                    asm[target_n].preds.push(i);
                    trace_if!(TRACE_CFA, "inst {}: set PREDS as {}", target_n, i);

                    // a conditional branch also falls through to the next inst
                    if let Some(next_inst) = ASMCode::find_next_inst(i, asm) {
                        // cur succ is next inst
                        asm[i].succs.push(next_inst);

                        // next inst's pred is cur
                        asm[next_inst].preds.push(i);

                        trace_if!(
                            TRACE_CFA,
                            "inst {}: SET SUCCS as c-branch fallthrough target {}",
                            i,
                            next_inst
                        );
                    } else {
                        panic!("conditional branch does not have a fallthrough target");
                    }
                }
                ASMBranchTarget::PotentiallyExcepting(ref target) => {
                    // may trigger exception and jump to target - similar as conditional branch
                    let target_n = self.blocks.get(target).unwrap().start_inst;

                    // cur inst's succ is target
                    asm[i].succs.push(target_n);

                    trace_if!(
                        TRACE_CFA,
                        "inst {}: is potentially excepting to {}",
                        i,
                        target
                    );
                    trace_if!(
                        TRACE_CFA,
                        "inst {}: excepting target index is {}",
                        i,
                        target_n
                    );
                    trace_if!(
                        TRACE_CFA,
                        "inst {}: set SUCCS as excepting target {}",
                        i,
                        target_n
                    );

                    asm[target_n].preds.push(i);

                    // the non-excepting path falls through to the next inst
                    if let Some(next_inst) = ASMCode::find_next_inst(i, asm) {
                        // cur succ is next inst
                        asm[i].succs.push(next_inst);

                        // next inst's pred is cur
                        asm[next_inst].preds.push(i);

                        trace_if!(
                            TRACE_CFA,
                            "inst {}: SET SUCCS as PEI fallthrough target {}",
                            i,
                            next_inst
                        );
                    } else {
                        panic!("PEI does not have a fallthrough target");
                    }
                }
                ASMBranchTarget::Return => {
                    // a return has no successor within this function
                    trace_if!(TRACE_CFA, "inst {}: is a return", i);
                    trace_if!(TRACE_CFA, "inst {}: has no successor", i);
                }
                ASMBranchTarget::None => {
                    // not branch nor cond branch, succ is next inst
                    trace_if!(TRACE_CFA, "inst {}: not a branch inst", i);
                    if let Some(next_inst) = ASMCode::find_next_inst(i, asm) {
                        trace_if!(
                            TRACE_CFA,
                            "inst {}: set SUCCS as next inst {}",
                            i,
                            next_inst
                        );
                        asm[i].succs.push(next_inst);
                    }
                }
            }
        }
    }

qinsoon's avatar
qinsoon committed
564
    /// finds the previous instruction (skip non-instruction assembly)
565 566 567 568 569 570 571 572 573 574 575 576 577 578 579 580 581 582 583 584
    fn find_prev_inst(i: usize, asm: &Vec<ASMInst>) -> Option<usize> {
        if i == 0 {
            None
        } else {
            let mut cur = i - 1;
            while cur != 0 {
                if !asm[cur].is_symbol {
                    return Some(cur);
                }

                if cur == 0 {
                    return None;
                } else {
                    cur -= 1;
                }
            }

            None
        }
    }
qinsoon's avatar
qinsoon committed
585

qinsoon's avatar
qinsoon committed
586
    /// finds the next instruction (skip non-instruction assembly)
587 588 589 590 591 592 593 594 595 596 597
    fn find_next_inst(i: usize, asm: &Vec<ASMInst>) -> Option<usize> {
        if i >= asm.len() - 1 {
            None
        } else {
            let mut cur = i + 1;
            while cur < asm.len() {
                if !asm[cur].is_symbol {
                    return Some(cur);
                }

                cur += 1;
qinsoon's avatar
qinsoon committed
598
            }
599 600

            None
qinsoon's avatar
qinsoon committed
601 602
        }
    }
qinsoon's avatar
qinsoon committed
603

qinsoon's avatar
qinsoon committed
604 605
    /// finds the last instruction that appears on or before the given index
    /// (skip non-instruction assembly)
606 607 608 609 610
    fn find_last_inst(i: usize, asm: &Vec<ASMInst>) -> Option<usize> {
        if i == 0 {
            None
        } else {
            let mut cur = i;
611
            loop {
612 613 614 615 616 617 618 619 620 621 622 623 624
                if !asm[cur].is_symbol {
                    return Some(cur);
                }

                if cur == 0 {
                    return None;
                } else {
                    cur -= 1;
                }
            }
        }
    }

qinsoon's avatar
qinsoon committed
625 626 627
    /// records a location whose frame-size operand must be patched once the
    /// final frame size is known (after register allocation)
    fn add_frame_size_patchpoint(&mut self, patchpoint: ASMLocation) {
        self.frame_size_patchpoints.push(patchpoint);
    }
628 629
}

630
impl MachineCode for ASMCode {
631 632 633
    fn as_any(&self) -> &Any {
        self
    }
qinsoon's avatar
qinsoon committed
634 635

    /// returns the count of instructions in this machine code
636 637 638
    fn number_of_insts(&self) -> usize {
        self.code.len()
    }
qinsoon's avatar
qinsoon committed
639 640

    /// is the specified index a move instruction?
641 642 643
    fn is_move(&self, index: usize) -> bool {
        let inst = self.code.get(index);
        match inst {
qinsoon's avatar
qinsoon committed
644 645 646 647 648 649 650 651 652 653 654 655 656 657 658
            Some(inst) => {
                let ref inst = inst.code;

                if inst.starts_with("movsd") || inst.starts_with("movss") {
                    // floating point move
                    true
                } else if inst.starts_with("movs") || inst.starts_with("movz") {
                    // sign extend, zero extend
                    false
                } else if inst.starts_with("mov") {
                    // normal mov
                    true
                } else {
                    false
                }
659
            }
660
            None => false
661 662
        }
    }
qinsoon's avatar
qinsoon committed
663 664

    /// is the specified index using memory operands?
qinsoon's avatar
qinsoon committed
665
    fn is_using_mem_op(&self, index: usize) -> bool {
qinsoon's avatar
qinsoon committed
666
        self.code[index].is_mem_op_used
qinsoon's avatar
qinsoon committed
667
    }
668

qinsoon's avatar
qinsoon committed
669 670
    /// is the specified index a jump instruction? (unconditional jump)
    /// returns an Option for target block
671 672 673 674
    fn is_jmp(&self, index: usize) -> Option<MuName> {
        let inst = self.code.get(index);
        match inst {
            Some(inst) if inst.code.starts_with("jmp") => {
675
                let split: Vec<&str> = inst.code.split(' ').collect();
676

677
                Some(demangle_name(String::from(split[1])))
678
            }
679
            _ => None
680 681 682
        }
    }

qinsoon's avatar
qinsoon committed
683
    /// is the specified index a label? returns an Option for the label
684 685 686 687
    fn is_label(&self, index: usize) -> Option<MuName> {
        let inst = self.code.get(index);
        match inst {
            Some(inst) if inst.code.ends_with(':') => {
688
                let split: Vec<&str> = inst.code.split(':').collect();
689

690
                Some(demangle_name(String::from(split[0])))
691
            }
692
            _ => None
693 694
        }
    }
qinsoon's avatar
qinsoon committed
695

qinsoon's avatar
qinsoon committed
696 697
    /// is the specified index loading a spilled register?
    /// returns an Option for the register that is loaded into
qinsoon's avatar
qinsoon committed
698 699 700 701
    fn is_spill_load(&self, index: usize) -> Option<P<Value>> {
        if let Some(inst) = self.code.get(index) {
            match inst.spill_info {
                Some(SpillMemInfo::Load(ref p)) => Some(p.clone()),
702
                _ => None
qinsoon's avatar
qinsoon committed
703 704 705 706 707 708
            }
        } else {
            None
        }
    }

qinsoon's avatar
qinsoon committed
709 710
    /// is the specified index storing a spilled register?
    /// returns an Option for the register that is stored
qinsoon's avatar
qinsoon committed
711 712 713 714
    fn is_spill_store(&self, index: usize) -> Option<P<Value>> {
        if let Some(inst) = self.code.get(index) {
            match inst.spill_info {
                Some(SpillMemInfo::Store(ref p)) => Some(p.clone()),
715
                _ => None
qinsoon's avatar
qinsoon committed
716 717 718 719 720
            }
        } else {
            None
        }
    }
qinsoon's avatar
qinsoon committed
721 722

    /// gets successors of a specified index
qinsoon's avatar
qinsoon committed
723
    fn get_succs(&self, index: usize) -> &Vec<usize> {
qinsoon's avatar
qinsoon committed
724
        &self.code[index].succs
qinsoon's avatar
qinsoon committed
725
    }
qinsoon's avatar
qinsoon committed
726 727

    /// gets predecessors of a specified index
qinsoon's avatar
qinsoon committed
728
    fn get_preds(&self, index: usize) -> &Vec<usize> {
qinsoon's avatar
qinsoon committed
729
        &self.code[index].preds
qinsoon's avatar
qinsoon committed
730
    }
qinsoon's avatar
qinsoon committed
731 732

    /// gets the register uses of a specified index
qinsoon's avatar
qinsoon committed
733 734
    fn get_inst_reg_uses(&self, index: usize) -> Vec<MuID> {
        self.code[index].uses.keys().map(|x| *x).collect()
735
    }
qinsoon's avatar
qinsoon committed
736 737

    /// gets the register defines of a specified index
qinsoon's avatar
qinsoon committed
738 739
    fn get_inst_reg_defines(&self, index: usize) -> Vec<MuID> {
        self.code[index].defines.keys().map(|x| *x).collect()
740
    }
qinsoon's avatar
qinsoon committed
741 742

    /// replace a temp with a machine register (to_reg must be a machine register)
743
    fn replace_reg(&mut self, from: MuID, to: MuID) {
qinsoon's avatar
qinsoon committed
744
        // replace defines
qinsoon's avatar
qinsoon committed
745 746
        for loc in self.get_define_locations(from) {
            let ref mut inst_to_patch = self.code[loc.line];
747 748 749

            // pick the right reg based on length
            let to_reg = x86_64::get_alias_for_length(to, loc.oplen);
750
            let to_reg_tag = to_reg.name();
751 752
            let to_reg_string = "%".to_string() + &to_reg_tag;

753 754 755 756
            string_utils::replace(
                &mut inst_to_patch.code,
                loc.index,
                &to_reg_string,
757
                to_reg_string.len()
758
            );
759
        }
760

qinsoon's avatar
qinsoon committed
761
        // replace uses
qinsoon's avatar
qinsoon committed
762 763
        for loc in self.get_use_locations(from) {
            let ref mut inst_to_patch = self.code[loc.line];
764 765 766

            // pick the right reg based on length
            let to_reg = x86_64::get_alias_for_length(to, loc.oplen);
767
            let to_reg_tag = to_reg.name();
768 769
            let to_reg_string = "%".to_string() + &to_reg_tag;

770 771 772 773
            string_utils::replace(
                &mut inst_to_patch.code,
                loc.index,
                &to_reg_string,
774
                to_reg_string.len()
775
            );
776 777
        }
    }
778

qinsoon's avatar
qinsoon committed
779
    /// replace a temp that is defined in the inst with another temp
780
    fn replace_define_tmp_for_inst(&mut self, from: MuID, to: MuID, inst: usize) {
781
        let to_reg_string: MuName = REG_PLACEHOLDER.clone();
782

qinsoon's avatar
qinsoon committed
783 784 785 786 787 788
        let asm = &mut self.code[inst];
        // if this reg is defined, replace the define
        if asm.defines.contains_key(&from) {
            let define_locs = asm.defines.get(&from).unwrap().to_vec();
            // replace temps
            for loc in define_locs.iter() {
789 790 791 792
                string_utils::replace(
                    &mut asm.code,
                    loc.index,
                    &to_reg_string,
793
                    to_reg_string.len()
794
                );
795 796
            }

qinsoon's avatar
qinsoon committed
797 798
            // remove old key, insert new one
            asm.defines.remove(&from);
799
            asm.defines.insert(to, define_locs);
800
        }
801 802
    }

qinsoon's avatar
qinsoon committed
803
    /// replace a temp that is used in the inst with another temp
804
    fn replace_use_tmp_for_inst(&mut self, from: MuID, to: MuID, inst: usize) {
805
        let to_reg_string: MuName = REG_PLACEHOLDER.clone();
806 807

        let asm = &mut self.code[inst];
qinsoon's avatar
qinsoon committed
808 809 810 811 812 813

        // if this reg is used, replace the use
        if asm.uses.contains_key(&from) {
            let use_locs = asm.uses.get(&from).unwrap().to_vec();
            // replace temps
            for loc in use_locs.iter() {
814 815 816 817
                string_utils::replace(
                    &mut asm.code,
                    loc.index,
                    &to_reg_string,
818
                    to_reg_string.len()
819
                );
820 821
            }

qinsoon's avatar
qinsoon committed
822 823
            // remove old key, insert new one
            asm.uses.remove(&from);
824
            asm.uses.insert(to, use_locs);
825 826
        }
    }
qinsoon's avatar
qinsoon committed
827

828
    /// replace destination for a jump instruction
829
    fn replace_branch_dest(&mut self, inst: usize, old_succ: usize, new_dest: &str, succ: MuID) {
830 831 832
        {
            let asm = &mut self.code[inst];

833 834 835 836
            asm.code = format!(
                "jmp {}",
                symbol(&mangle_name(Arc::new(new_dest.to_string())))
            );
837
            asm.succs.retain(|&x| x != old_succ);
838 839 840 841
            asm.succs.push(succ);
        }
        {
            let asm = &mut self.code[succ];
842

843 844 845 846
            if !asm.preds.contains(&inst) {
                asm.preds.push(inst);
            }
        }
847 848
    }

qinsoon's avatar
qinsoon committed
849
    /// set an instruction as nop
850
    fn set_inst_nop(&mut self, index: usize) {
851
        self.code[index].code.clear();
852
    }
qinsoon's avatar
qinsoon committed
853

854 855 856 857 858 859 860 861 862 863
    /// is the specified index is a nop?
    fn is_nop(&self, index: usize) -> bool {
        let ref inst = self.code[index];
        if inst.code == "" || inst.code == "nop" {
            true
        } else {
            false
        }
    }

qinsoon's avatar
qinsoon committed
864 865 866
    /// remove unnecessary push/pop if the callee saved register is not used
    /// returns what registers push/pop have been deleted, and the number of callee saved registers
    /// that weren't deleted
867
    fn remove_unnecessary_callee_saved(&mut self, used_callee_saved: Vec<MuID>) -> HashSet<MuID> {
qinsoon's avatar
qinsoon committed
868 869 870
        // we always save rbp
        let rbp = x86_64::RBP.extract_ssa_id().unwrap();

871
        let find_op_other_than_rbp = |inst: &ASMInst| -> MuID {
qinsoon's avatar
qinsoon committed
872
            for id in inst.defines.keys() {
873 874
                if *id != rbp {
                    return *id;
qinsoon's avatar
qinsoon committed
875 876 877
                }
            }
            for id in inst.uses.keys() {
878 879
                if *id != rbp {
                    return *id;
qinsoon's avatar
qinsoon committed
880 881
                }
            }
882
            panic!("Expected to find a used register other than the rbp");
qinsoon's avatar
qinsoon committed
883 884 885
        };

        let mut inst_to_remove = vec![];
886
        let mut regs_to_remove = HashSet::new();
qinsoon's avatar
qinsoon committed
887 888 889

        for i in 0..self.number_of_insts() {
            let ref inst = self.code[i];
890 891 892 893
            match inst.spill_info {
                Some(SpillMemInfo::CalleeSaved) => {
                    let reg = find_op_other_than_rbp(inst);
                    if !used_callee_saved.contains(&reg) {
894
                        trace!(
qinsoon's avatar
qinsoon committed
895 896
                            "removing instruction {:?} for save/restore \
                             unnecessary callee saved regs",
897 898
                            inst
                        );
899 900
                        regs_to_remove.insert(reg);
                        inst_to_remove.push(i);
qinsoon's avatar
qinsoon committed
901 902
                    }
                }
903
                _ => {}
qinsoon's avatar
qinsoon committed
904 905 906 907 908 909 910
            }
        }

        for i in inst_to_remove {
            self.set_inst_nop(i);
        }

911
        regs_to_remove
qinsoon's avatar
qinsoon committed
912
    }
qinsoon's avatar
qinsoon committed
913

qinsoon's avatar
qinsoon committed
914
    /// patch frame size
915
    fn patch_frame_size(&mut self, size: usize) {
qinsoon's avatar
qinsoon committed
916
        let size = size.to_string();
qinsoon's avatar
qinsoon committed
917
        assert!(size.len() <= FRAME_SIZE_PLACEHOLDER_LEN);
qinsoon's avatar
qinsoon committed
918 919 920 921 922 923

        for loc in self.frame_size_patchpoints.iter() {
            let ref mut inst = self.code[loc.line];
            string_utils::replace(&mut inst.code, loc.index, &size, size.len());
        }
    }
qinsoon's avatar
qinsoon committed
924 925

    /// emit the machine code as a byte array
926 927
    fn emit(&self) -> Vec<u8> {
        let mut ret = vec![];
928

929
        for inst in self.code.iter() {
930 931 932 933
            if !inst.is_symbol {
                ret.append(&mut "\t".to_string().into_bytes());
            }

934 935 936
            ret.append(&mut inst.code.clone().into_bytes());
            ret.append(&mut "\n".to_string().into_bytes());
        }
937

938 939
        ret
    }
940

qinsoon's avatar
qinsoon committed
941
    /// emit the machine instruction at the given index as a byte array
942 943 944 945 946 947 948 949 950 951 952 953 954
    fn emit_inst(&self, index: usize) -> Vec<u8> {
        let mut ret = vec![];

        let ref inst = self.code[index];

        if !inst.is_symbol {
            ret.append(&mut "\t".to_string().into_bytes());
        }

        ret.append(&mut inst.code.clone().into_bytes());

        ret
    }
qinsoon's avatar
qinsoon committed
955 956

    /// print the whole machine code by trace level log
957 958 959
    fn trace_mc(&self) {
        trace!("");
        trace!("code for {}: \n", self.name);
960

961 962
        let n_insts = self.code.len();
        for i in 0..n_insts {
963
            self.trace_inst(i);
964
        }
965 966

        trace!("")
967
    }
qinsoon's avatar
qinsoon committed
968 969

    /// print an inst for the given index
970
    fn trace_inst(&self, i: usize) {
971 972 973
        trace!(
            "#{}\t{:60}\t\tdefine: {:?}\tuses: {:?}\tpred: {:?}\tsucc: {:?}",
            i,
974
            demangle_text(&self.code[i].code),
975 976 977 978 979
            self.get_inst_reg_defines(i),
            self.get_inst_reg_uses(i),
            self.code[i].preds,
            self.code[i].succs
        );
980
    }
qinsoon's avatar
qinsoon committed
981 982

    /// gets block livein
983
    fn get_ir_block_livein(&self, block: &str) -> Option<&Vec<MuID>> {
984
        match self.blocks.get(&block.to_string()) {
qinsoon's avatar
qinsoon committed
985
            Some(ref block) => Some(&block.livein),
986
            None => None
qinsoon's avatar
qinsoon committed
987
        }
988
    }
qinsoon's avatar
qinsoon committed
989 990

    /// gets block liveout
991
    fn get_ir_block_liveout(&self, block: &str) -> Option<&Vec<MuID>> {
992
        match self.blocks.get(&block.to_string()) {
qinsoon's avatar
qinsoon committed
993
            Some(ref block) => Some(&block.liveout),
994
            None => None
qinsoon's avatar
qinsoon committed
995
        }
996
    }
qinsoon's avatar
qinsoon committed
997 998

    /// sets block livein
999
    fn set_ir_block_livein(&mut self, block: &str, set: Vec<MuID>) {
1000
        let block = self.blocks.get_mut(&block.to_string()).unwrap();
qinsoon's avatar
qinsoon committed
1001
        block.livein = set;
1002
    }
qinsoon's avatar
qinsoon committed
1003 1004

    /// sets block liveout
1005
    fn set_ir_block_liveout(&mut self, block: &str, set: Vec<MuID>) {
1006
        let block = self.blocks.get_mut(&block.to_string()).unwrap();
qinsoon's avatar
qinsoon committed
1007
        block.liveout = set;
1008
    }
qinsoon's avatar
qinsoon committed
1009 1010

    /// gets all the blocks
qinsoon's avatar
qinsoon committed
1011 1012
    fn get_all_blocks(&self) -> Vec<MuName> {
        self.blocks.keys().map(|x| x.clone()).collect()
1013
    }
1014

qinsoon's avatar
qinsoon committed
1015
    /// gets the entry block
1016 1017 1018
    fn get_entry_block(&self) -> MuName {
        self.entry.clone()
    }
qinsoon's avatar
qinsoon committed
1019 1020

    /// gets the range of a given block, returns [start_inst, end_inst) (end_inst not included)
1021
    fn get_block_range(&self, block: &str) -> Option<ops::Range<usize>> {
1022
        match self.blocks.get(&block.to_string()) {
qinsoon's avatar
qinsoon committed
1023
            Some(ref block) => Some(block.start_inst..block.end_inst),
1024
            None => None
1025 1026
        }
    }
1027

qinsoon's avatar
qinsoon committed
1028
    /// gets the block for a given index, returns an Option for the block
1029 1030 1031 1032 1033 1034 1035 1036 1037
    fn get_block_for_inst(&self, index: usize) -> Option<MuName> {
        for (name, block) in self.blocks.iter() {
            if index >= block.start_inst && index < block.end_inst {
                return Some(name.clone());
            }
        }
        None
    }

qinsoon's avatar
qinsoon committed
1038
    /// gets the next instruction of a specified index (labels are not instructions)
1039 1040 1041 1042
    fn get_next_inst(&self, index: usize) -> Option<usize> {
        ASMCode::find_next_inst(index, &self.code)
    }

qinsoon's avatar
qinsoon committed
1043
    /// gets the previous instruction of a specified index (labels are not instructions)
1044 1045 1046
    fn get_last_inst(&self, index: usize) -> Option<usize> {
        ASMCode::find_last_inst(index, &self.code)
    }
1047 1048
}

qinsoon's avatar
qinsoon committed
1049
impl ASMInst {
qinsoon's avatar
qinsoon committed
1050
    /// creates a symbolic assembly code (not an instruction)
qinsoon's avatar
qinsoon committed
1051 1052
    fn symbolic(line: String) -> ASMInst {
        ASMInst {
1053
            code: line,
1054 1055
            defines: LinkedHashMap::new(),
            uses: LinkedHashMap::new(),
qinsoon's avatar
qinsoon committed
1056
            is_mem_op_used: false,
1057
            is_symbol: true,
qinsoon's avatar
qinsoon committed
1058 1059
            preds: vec![],
            succs: vec![],
1060 1061
            branch: ASMBranchTarget::None,

1062
            spill_info: None
1063 1064
        }
    }
qinsoon's avatar
qinsoon committed
1065 1066

    /// creates an instruction
qinsoon's avatar
qinsoon committed
1067 1068
    fn inst(
        inst: String,
1069 1070
        defines: LinkedHashMap<MuID, Vec<ASMLocation>>,
        uses: LinkedHashMap<MuID, Vec<ASMLocation>>,
qinsoon's avatar
qinsoon committed
1071
        is_mem_op_used: bool,
1072
        target: ASMBranchTarget,
1073
        spill_info: Option<SpillMemInfo>
1074
    ) -> ASMInst {
qinsoon's avatar
qinsoon committed
1075
        ASMInst {
1076 1077
            code: inst,
            defines: defines,
qinsoon's avatar
qinsoon committed
1078
            uses: uses,
1079
            is_symbol: false,
qinsoon's avatar
qinsoon committed
1080 1081 1082
            is_mem_op_used: is_mem_op_used,
            preds: vec![],
            succs: vec![],
1083 1084
            branch: target,

1085
            spill_info: spill_info
1086 1087
        }
    }
qinsoon's avatar
qinsoon committed
1088 1089

    /// creates a nop instruction
qinsoon's avatar
qinsoon committed
1090 1091
    fn nop() -> ASMInst {
        ASMInst {
1092
            code: "".to_string(),
1093 1094
            defines: LinkedHashMap::new(),
            uses: LinkedHashMap::new(),
1095
            is_symbol: false,
qinsoon's avatar
qinsoon committed
1096 1097 1098
            is_mem_op_used: false,
            preds: vec![],
            succs: vec![],
1099 1100
            branch: ASMBranchTarget::None,

1101
            spill_info: None
1102 1103
        }
    }
1104 1105 1106
}

impl ASMLocation {
1107
    fn new(line: usize, index: usize, len: usize, oplen: usize) -> ASMLocation {
1108
        ASMLocation {
qinsoon's avatar
qinsoon committed
1109
            line: line,
1110
            index: index,
1111
            len: len,
1112
            oplen: oplen
1113 1114 1115 1116
        }
    }
}

qinsoon's avatar
qinsoon committed
1117 1118 1119 1120 1121 1122
impl ASMBlock {
    /// creates an empty block; start/end are set to usize::MAX until the
    /// block's instruction range is known
    fn new() -> ASMBlock {
        ASMBlock {
            start_inst: usize::MAX,
            end_inst: usize::MAX,
            livein: vec![],
            liveout: vec![]
        }
    }
}

/// ASMCodeGen is the assembly backend that implements CodeGenerator.
pub struct ASMCodeGen {
1130
    cur: Option<Box<ASMCode>>
1131 1132
}

/// placeholder in assembly code for a temporary
const REG_PLACEHOLDER_LEN: usize = 5;
1135
lazy_static! {
1136
    pub static ref REG_PLACEHOLDER : MuName = {
1137
        let blank_spaces = [' ' as u8; REG_PLACEHOLDER_LEN];
1138
        Arc::new(format!("%{}", str::from_utf8(&blank_spaces).unwrap()))
1139
    };
1140 1141
}

/// placeholder in assembly code for a frame size
//  this is a fairly random number, but a frame is something smaller than 10^10
const FRAME_SIZE_PLACEHOLDER_LEN: usize = 10;
qinsoon's avatar
qinsoon committed
1145 1146 1147 1148 1149 1150 1151
lazy_static! {
    pub static ref FRAME_SIZE_PLACEHOLDER : String = {
        let blank_spaces = [' ' as u8; FRAME_SIZE_PLACEHOLDER_LEN];
        format!("{}", str::from_utf8(&blank_spaces).unwrap())
    };
}

1152 1153
impl ASMCodeGen {
    pub fn new() -> ASMCodeGen {
1154
        ASMCodeGen { cur: None }
1155
    }
qinsoon's avatar
qinsoon committed
1156 1157

    /// returns a reference to current assembly code that is being constructed
1158 1159 1160
    fn cur(&self) -> &ASMCode {
        // panics if no assembly code is currently under construction
        self.cur.as_ref().unwrap()
    }
qinsoon's avatar
qinsoon committed
1161 1162

    /// returns a mutable reference to current assembly code that is being constructed
1163 1164 1165
    fn cur_mut(&mut self) -> &mut ASMCode {
        // panics if no assembly code is currently under construction
        self.cur.as_mut().unwrap()
    }
qinsoon's avatar
qinsoon committed
1166 1167

    /// returns current line number (also the index for next instruction)
1168 1169 1170
    fn line(&self) -> usize {
        // the length of the code vector is the index of the next instruction
        self.cur().code.len()
    }
qinsoon's avatar
qinsoon committed
1171

1172
    /// starst a block
1173
    fn start_block_internal(&mut self, block_name: MuName) {
1174 1175 1176
        self.cur_mut()
            .blocks
            .insert(block_name.clone(), ASMBlock::new());
1177
        let start = self.line();
1178 1179 1180 1181 1182
        self.cur_mut()
            .blocks
            .get_mut(&block_name)
            .unwrap()
            .start_inst = start;
1183
    }
1184

1185
    /// appends .global to current code
1186 1187 1188
    fn add_asm_global_label(&mut self, label: String) {
        self.add_asm_symbolic(directive_globl(label.clone()));
        self.add_asm_label(label);
1189
    }
qinsoon's avatar
qinsoon committed
1190

1191
    /// appends .equiv to current code
1192 1193 1194 1195 1196
    fn add_asm_global_equiv(&mut self, name: String, target: String) {
        // make `name` global, then define it as an alias (.equiv) of `target`
        self.add_asm_symbolic(directive_globl(name.clone()));
        self.add_asm_symbolic(directive_equiv(name, target));
    }

1197
    /// appends an label to current code
1198 1199
    fn add_asm_label(&mut self, label: String) {
        self.add_asm_symbolic(format!("{}:", label));
1200
    }
1201

1202
    /// appends a symbolic assembly to current node
1203
    fn add_asm_symbolic(&mut self, code: String) {
qinsoon's avatar
qinsoon committed
1204
        self.cur_mut().code.push(ASMInst::symbolic(code));
1205
    }
1206

qinsoon's avatar
qinsoon committed
1207 1208 1209 1210
    /// appends a call instruction. In this instruction:
    /// * return registers are defined
    /// * caller saved registers are defined
    /// * user supplied registers
1211 1212 1213 1214
    fn add_asm_call(
        &mut self,
        code: String,
        potentially_excepting: Option<MuName>,
1215 1216
        use_vec: Vec<P<Value>>,
        def_vec: Vec<P<Value>>,
1217
        target: Option<(MuID, ASMLocation)>
1218 1219 1220 1221 1222 1223
    ) {
        let mut uses: LinkedHashMap<MuID, Vec<ASMLocation>> = LinkedHashMap::new();
        if target.is_some() {
            let (id, loc) = target.unwrap();
            uses.insert(id, vec![loc]);
        }
1224 1225
        for u in use_vec {
            uses.insert(u.id(), vec![]);
1226 1227 1228
        }

        let mut defines: LinkedHashMap<MuID, Vec<ASMLocation>> = LinkedHashMap::new();
1229 1230
        for d in def_vec {
            defines.insert(d.id(), vec![]);
qinsoon's avatar
qinsoon committed
1231
        }
1232

1233 1234 1235 1236 1237 1238 1239 1240 1241 1242 1243 1244
        self.add_asm_inst_internal(
            code,
            defines,
            uses,
            false,
            {
                if potentially_excepting.is_some() {
                    ASMBranchTarget::PotentiallyExcepting(potentially_excepting.unwrap())
                } else {
                    ASMBranchTarget::None
                }
            },
1245
            None
1246
        )
1247
    }
qinsoon's avatar
qinsoon committed
1248 1249 1250


    /// appends a return instruction
1251
    fn add_asm_ret(&mut self, code: String) {
1252 1253
        // return instruction does not use anything (not RETURN REGS)
        // otherwise it will keep RETURN REGS alive
qinsoon's avatar
qinsoon committed
1254 1255
        // and if there is no actual move into RETURN REGS, it will keep RETURN REGS for alive
        // for very long and prevents anything using those registers
1256 1257 1258 1259 1260 1261
        self.add_asm_inst_internal(
            code,
            linked_hashmap!{},
            linked_hashmap!{},
            false,
            ASMBranchTarget::Return,
1262
            None
1263
        );
1264
    }
qinsoon's avatar
qinsoon committed
1265 1266

    /// appends an unconditional branch instruction
1267
    fn add_asm_branch(&mut self, code: String, target: MuName) {
1268 1269 1270 1271 1272 1273
        self.add_asm_inst_internal(
            code,
            linked_hashmap!{},
            linked_hashmap!{},
            false,
            ASMBranchTarget::Unconditional(target),
1274
            None
1275
        );
1276
    }
qinsoon's avatar
qinsoon committed
1277 1278

    /// appends a conditional branch instruction
1279
    fn add_asm_branch2(&mut self, code: String, target: MuName) {
1280 1281