To protect your data, the CISO has suggested that users enable GitLab 2FA as soon as possible.

Commit 2aba65ad authored by Isaac Oscar Gariano's avatar Isaac Oscar Gariano
Browse files

Fixed most bugs in name handling...

parent 90d2b8a3
......@@ -995,7 +995,7 @@ impl fmt::Display for Constant {
}
#[cfg(target_arch = "x86_64")]
rodal_enum!(MemoryLocation{{Address: scale, base, offset, index}, {Symbolic: is_global, base, label, is_native}});
rodal_enum!(MemoryLocation{{Address: scale, base, offset, index}, {Symbolic: is_global, is_native, base, label}});
#[cfg(target_arch = "x86_64")]
#[derive(Debug, Clone, PartialEq)]
pub enum MemoryLocation {
......@@ -1042,7 +1042,7 @@ impl fmt::Display for MemoryLocation {
}
#[cfg(target_arch = "aarch64")]
rodal_enum!(MemoryLocation{{VirtualAddress: signed, base, offset, scale}, {Address: base, offset, shift, signed}, {Symbolic: label, is_global, is_native}});
rodal_enum!(MemoryLocation{{VirtualAddress: signed, base, offset, scale}, {Address: base, offset, shift, signed}, {Symbolic: is_global, is_native, label}});
#[cfg(target_arch = "aarch64")]
#[derive(Debug, Clone, PartialEq)]
pub enum MemoryLocation {
......
......@@ -109,7 +109,7 @@ pub trait CodeGenerator {
// branching
// calls
fn emit_bl(&mut self, callsite: String, func: MuName, pe: Option<MuName>) -> ValueLocation;
fn emit_bl(&mut self, callsite: String, func: MuName, pe: Option<MuName>, is_native: bool) -> ValueLocation;
fn emit_blr(&mut self, callsite: String, func: Reg, pe: Option<MuName>) -> ValueLocation;
// Branches
......
......@@ -36,6 +36,7 @@ use compiler::backend::PROLOGUE_BLOCK_NAME;
use compiler::backend::EPILOGUE_BLOCK_NAME;
use compiler::backend::aarch64::*;
use compiler::backend::make_block_name;
use compiler::machine_code::CompiledFunction;
use compiler::frame::Frame;
......@@ -51,6 +52,7 @@ pub struct InstructionSelection {
backend: Box<CodeGenerator>,
current_fv_id: MuID,
current_fv_name: MuName,
current_callsite_id: usize,
current_frame: Option<Frame>,
current_block: Option<MuName>,
......@@ -78,6 +80,7 @@ impl <'a> InstructionSelection {
backend: Box::new(ASMCodeGen::new()),
current_fv_id: 0,
current_fv_name: String::new(),
current_callsite_id: 0,
current_frame: None,
current_block: None,
......@@ -186,7 +189,7 @@ impl <'a> InstructionSelection {
// it is possible that the fallthrough block is scheduled somewhere else
// we need to explicitly jump to it
self.finish_block();
let fallthrough_temp_block = format!("{}_{}_branch_fallthrough", self.current_fv_id, node.id());
let fallthrough_temp_block = make_block_name(node, "branch_fallthrough", );
self.start_block(fallthrough_temp_block, &vec![]);
let fallthrough_target = f_content.get_block(fallthrough_dest.target).name();
......@@ -340,7 +343,7 @@ impl <'a> InstructionSelection {
self.backend.emit_b_cond("EQ", target);
self.finish_block();
self.start_block(format!("{}_switch_not_met_case_{}", node.id(), case_op_index), &vec![]);
self.start_block(make_block_name(node, format!("switch_not_met_case_{}", case_op_index).as_str()), &vec![]);
}
// emit default
......@@ -624,9 +627,9 @@ impl <'a> InstructionSelection {
// overflows to_ty_size, but not to_ty_reg_size
let to_ty_reg_size = check_op_len(&tmp_res.ty); // The size of the aarch64 register
if to_ty_size != to_ty_reg_size {
let blk_positive = format!("{}_positive", node.id());
let blk_negative = format!("{}_negative", node.id());
let blk_end = format!("{}_end", node.id());
let blk_positive = make_block_name(node, "positive");
let blk_negative = make_block_name(node, "negative");
let blk_end = make_block_name(node, "end");
let tmp = make_temporary(f_context, to_ty.clone(), vm);
self.backend.emit_tbnz(&tmp_res, (to_ty_size - 1) as u8, blk_negative.clone());
......@@ -743,7 +746,7 @@ impl <'a> InstructionSelection {
self.finish_block();
let blk_load_start = format!("{}_load_start", node.id());
let blk_load_start = make_block_name(node, "load_start");
// load_start:
self.start_block(blk_load_start.clone(), &vec![temp_loc.clone()]);
......@@ -845,7 +848,7 @@ impl <'a> InstructionSelection {
self.finish_block();
let blk_store_start = format!("{}_store_start", node.id());
let blk_store_start = make_block_name(node, "store_start");
// store_start:
self.start_block(blk_store_start.clone(), &vec![temp_loc.clone()]);
......@@ -912,9 +915,9 @@ impl <'a> InstructionSelection {
let res_value = self.get_result_value(node, 0);
let res_success = self.get_result_value(node, 1);
let blk_cmpxchg_start = format!("{}_cmpxchg_start", node.id());
let blk_cmpxchg_failed = format!("{}_cmpxchg_failed", node.id());
let blk_cmpxchg_succeded = format!("{}_cmpxchg_succeded", node.id());
let blk_cmpxchg_start = make_block_name(node, "cmpxchg_start", node.id());
let blk_cmpxchg_failed = make_block_name(node, "cmpxchg_failed", node.id());
let blk_cmpxchg_succeded = make_block_name(node, "cmpxchg_succeded", node.id());
self.finish_block();
......@@ -2702,8 +2705,8 @@ impl <'a> InstructionSelection {
// emit: ALLOC_LARGE:
// emit: >> large object alloc
// emit: ALLOC_LARGE_END:
let blk_alloc_large = format!("{}_alloc_large", node.id());
let blk_alloc_large_end = format!("{}_alloc_large_end", node.id());
let blk_alloc_large = make_block_name(node, "alloc_large");
let blk_alloc_large_end = make_block_name(node, "alloc_large_end");
if OBJECT_HEADER_SIZE != 0 {
let size_with_hdr = make_temporary(f_context, UINT64_TYPE.clone(), vm);
......@@ -2715,7 +2718,7 @@ impl <'a> InstructionSelection {
self.backend.emit_b_cond("GT", blk_alloc_large.clone());
self.finish_block();
self.start_block(format!("{}_allocsmall", node.id()), &vec![]);
self.start_block(make_block_name(node, "allocsmall"), &vec![]);
self.emit_alloc_sequence_small(tmp_allocator.clone(), size.clone(), align, node, f_context, vm);
self.backend.emit_b(blk_alloc_large_end.clone());
......@@ -2827,7 +2830,7 @@ impl <'a> InstructionSelection {
} else if n == 1 {
tys[0].clone()
} else {
P(MuType::new(new_internal_id(), MuType_::mustruct(format!("return_type#{}", new_internal_id()), tys.to_vec())))
P(MuType::new(new_internal_id(), MuType_::mustruct(format!("#{}", new_internal_id()), tys.to_vec())))
}
}
......@@ -3344,7 +3347,7 @@ impl <'a> InstructionSelection {
} else {
let callsite = self.new_callsite_label(cur_node);
self.backend.emit_bl(callsite.clone(), func_name, None); // assume ccall wont throw exception
self.backend.emit_bl(callsite.clone(), func_name, None, true); // assume ccall wont throw exception
// TODO: What if theres an exception block?
self.current_callsites.push_back((callsite, 0, stack_arg_size));
......@@ -3499,7 +3502,7 @@ impl <'a> InstructionSelection {
unimplemented!()
} else {
let callsite = self.new_callsite_label(Some(cur_node));
self.backend.emit_bl(callsite, target.name().unwrap(), potentially_excepting)
self.backend.emit_bl(callsite, target.name().unwrap(), potentially_excepting, false)
}
} else {
let target = self.emit_ireg(func, f_content, f_context, vm);
......@@ -3556,8 +3559,7 @@ impl <'a> InstructionSelection {
}
fn emit_common_prologue(&mut self, args: &Vec<P<Value>>, sig: &P<CFuncSig>, f_context: &mut FunctionContext, vm: &VM) {
// no livein
self.start_block(PROLOGUE_BLOCK_NAME.to_string(), &vec![]);
self.start_block(format!("{}:{}", self.current_fv_name, PROLOGUE_BLOCK_NAME), &vec![]);
// Push the frame pointer and link register onto the stack
self.backend.emit_push_pair(&LR, &FP, &SP);
......@@ -3666,7 +3668,7 @@ impl <'a> InstructionSelection {
// Live in are the registers that hold the return values
// (if the value is returned through 'XR' than the caller is responsible for managing lifetime)
let livein = self.compute_return_registers(&ret_type, vm);
self.start_block(EPILOGUE_BLOCK_NAME.to_string(), &livein);
self.start_block(format!("{}:{}", self.current_fv_name, EPILOGUE_BLOCK_NAME), &livein);
// pop all callee-saved registers
for i in (0..CALLEE_SAVED_FPRs.len()).rev() {
......@@ -4354,9 +4356,9 @@ impl <'a> InstructionSelection {
fn new_callsite_label(&mut self, cur_node: Option<&TreeNode>) -> String {
let ret = {
if cur_node.is_some() {
format!("callsite_{}_{}_{}", self.current_fv_id, cur_node.unwrap().id(), self.current_callsite_id)
make_block_name(cur_node.unwrap(), format!("callsite_{}", self.current_callsite_id).as_str())
} else {
format!("callsite_{}_anon_{}", self.current_fv_id, self.current_callsite_id)
format!("{}:callsite_{}", self.current_fv_name, self.current_callsite_id)
}
};
self.current_callsite_id += 1;
......@@ -4391,6 +4393,7 @@ impl CompilerPass for InstructionSelection {
let entry_block = func_ver.content.as_ref().unwrap().get_entry_block();
self.current_fv_id = func_ver.id();
self.current_fv_name = func_ver.name();
self.current_frame = Some(Frame::new(func_ver.id()));
self.current_func_start = Some({
let funcs = vm.funcs().read().unwrap();
......
......@@ -821,7 +821,6 @@ impl MachineCode for ASMCode {
return Some(name.clone());
}
}
None
}
......@@ -1001,15 +1000,25 @@ impl ASMCodeGen {
fn line(&self) -> usize {
self.cur().code.len()
}
fn add_asm_label(&mut self, code: String) {
let l = self.line();
self.cur_mut().code.push(ASMInst::symbolic(code));
fn start_block_internal(&mut self, block_name: MuName) {
self.cur_mut().blocks.insert(block_name.clone(), ASMBlock::new());
let start = self.line();
self.cur_mut().blocks.get_mut(&block_name).unwrap().start_inst = start;
}
fn add_asm_block_label(&mut self, code: String, block_name: MuName) {
let l = self.line();
self.cur_mut().code.push(ASMInst::symbolic(code));
fn add_asm_global_label(&mut self, label: String) {
self.add_asm_symbolic(directive_globl(label.clone()));
self.add_asm_label(label);
}
fn add_asm_global_equiv(&mut self, name: String, target: String) {
self.add_asm_symbolic(directive_globl(name.clone()));
self.add_asm_symbolic(directive_equiv(name, target));
}
fn add_asm_label(&mut self, label: String) {
self.add_asm_symbolic(format!("{}:", label));
}
fn add_asm_symbolic(&mut self, code: String){
......@@ -1330,6 +1339,17 @@ impl ASMCodeGen {
}
}
fn mangle_block_label(&self, label: MuName) -> String {
format!("{}_{}", self.cur().name, label)
}
fn unmangle_block_label(fn_name: MuName, label: String) -> MuName {
// input: _fn_name_BLOCK_NAME
// return BLOCK_NAME
let split : Vec<&str> = label.splitn(2, &(fn_name + "_")).collect();
String::from(split[1])
}
fn finish_code_sequence_asm(&mut self) -> Box<ASMCode> {
self.cur.take().unwrap()
}
......@@ -2007,12 +2027,9 @@ impl CodeGenerator for ASMCodeGen {
// to link with C sources via gcc
let func_symbol = symbol(mangle_name(func_name.clone()));
self.add_asm_symbolic(directive_globl(func_symbol.clone()));
self.add_asm_symbolic(format!("{}:", func_symbol.clone()));
self.add_asm_global_label(func_symbol.clone());
if is_valid_c_identifier(&func_name) {
let demangled_name = symbol(func_name.clone());
self.add_asm_symbolic(directive_globl(demangled_name.clone()));
self.add_asm_symbolic(directive_equiv(demangled_name, func_symbol.clone()));
self.add_asm_global_equiv(symbol(func_name.clone()), func_symbol);
}
ValueLocation::Relocatable(RegGroup::GPR, func_name)
......@@ -2024,9 +2041,7 @@ impl CodeGenerator for ASMCodeGen {
symbol.push_str(":end");
symbol
};
let func_end_sym = symbol(mangle_name(func_end.clone()));
self.add_asm_symbolic(directive_globl(func_end_sym.clone()));
self.add_asm_symbolic(format!("{}:", func_end_sym));
self.add_asm_global_label(symbol(mangle_name(func_end.clone())));
self.cur.as_mut().unwrap().control_flow_analysis();
......@@ -2070,21 +2085,15 @@ impl CodeGenerator for ASMCodeGen {
}
fn start_block(&mut self, block_name: MuName) {
let label = format!("{}:", mangle_name(block_name.clone()));
self.add_asm_block_label(label, block_name.clone());
self.cur_mut().blocks.insert(block_name.clone(), ASMBlock::new());
let start = self.line();
self.cur_mut().blocks.get_mut(&block_name).unwrap().start_inst = start;
self.add_asm_label(mangle_name(block_name.clone()));
self.start_block_internal(block_name);
}
fn start_exception_block(&mut self, block_name: MuName) -> ValueLocation {
let mangled_name = mangle_name(block_name.clone());
self.add_asm_symbolic(directive_globl(symbol(mangled_name.clone())));
self.start_block(block_name.clone());
self.add_asm_global_label(mangle_name(block_name.clone()));
self.start_block_internal(block_name.clone());
ValueLocation::Relocatable(RegGroup::GPR, mangled_name)
ValueLocation::Relocatable(RegGroup::GPR, block_name)
}
fn end_block(&mut self, block_name: MuName) {
......@@ -2998,46 +3007,38 @@ impl CodeGenerator for ASMCodeGen {
fn emit_call_near_rel32(&mut self, callsite: String, func: MuName, pe: Option<MuName>, is_native: bool) -> ValueLocation {
if is_native {
trace!("emit: call {}", func);
trace!("emit: call /*C*/ {}", func);
} else {
trace!("emit: ccall {}", func);
trace!("emit: call {}", func);
}
let callsite = mangle_name(callsite);
let func = if is_native {
"/*C*/".to_string() + func.as_str()
"/*C*/".to_string() + symbol(func).as_str()
} else {
mangle_name(func)
symbol(mangle_name(func))
};
let asm = if cfg!(target_os = "macos") {
format!("call {}", symbol(func))
format!("call {}", func)
} else {
format!("call {}@PLT", symbol(func))
format!("call {}@PLT", func)
};
self.add_asm_call(asm, pe);
let callsite_symbol = symbol(callsite.clone());
self.add_asm_symbolic(directive_globl(callsite_symbol.clone()));
self.add_asm_symbolic(format!("{}:", callsite_symbol.clone()));
self.add_asm_global_label(symbol(mangle_name(callsite.clone())));
ValueLocation::Relocatable(RegGroup::GPR, callsite)
}
fn emit_call_near_r64(&mut self, callsite: String, func: &P<Value>, pe: Option<MuName>) -> ValueLocation {
trace!("emit: call {}", func);
let callsite = mangle_name(callsite);
let (reg, id, loc) = self.prepare_reg(func, 6);
let asm = format!("call *{}", reg);
self.add_asm_call_with_extra_uses(asm, linked_hashmap!{id => vec![loc]}, pe);
let callsite_symbol = symbol(callsite.clone());
self.add_asm_symbolic(directive_globl(callsite_symbol.clone()));
self.add_asm_symbolic(format!("{}:", callsite_symbol.clone()));
self.add_asm_global_label(symbol(mangle_name(callsite.clone())));
ValueLocation::Relocatable(RegGroup::GPR, callsite)
}
......@@ -3360,7 +3361,7 @@ use std::fs::File;
pub fn emit_code(fv: &mut MuFunctionVersion, vm: &VM) {
use std::io::prelude::*;
use std::path;
let funcs = vm.funcs().read().unwrap();
let func = funcs.get(&fv.func_id).unwrap().read().unwrap();
......@@ -3372,30 +3373,53 @@ pub fn emit_code(fv: &mut MuFunctionVersion, vm: &VM) {
let mut file_path = path::PathBuf::new();
file_path.push(&vm.vm_options.flag_aot_emit_dir);
file_path.push(func.name() + ".s");
let mut file = match File::create(file_path.as_path()) {
Err(why) => panic!("couldn't create emission file {}: {}", file_path.to_str().unwrap(), why),
Ok(file) => file
};
file_path.push(func.name() + ".S");
{
let mut file = match File::create(file_path.as_path()) {
Err(why) => panic!("couldn't create emission file {}: {}", file_path.to_str().unwrap(), why),
Ok(file) => file
};
// constants in text section
file.write("\t.text\n".as_bytes()).unwrap();
// constants in text section
file.write("\t.text\n".as_bytes()).unwrap();
// FIXME: need a more precise way to determine alignment
// (probably use alignment backend info, which require introducing int128 to zebu)
write_const_min_align(&mut file);
// FIXME: need a more precise way to determine alignment
// (probably use alignment backend info, which require introducing int128 to zebu)
write_const_min_align(&mut file);
for (id, constant) in cf.consts.iter() {
let mem = cf.const_mem.get(id).unwrap();
for (id, constant) in cf.consts.iter() {
let mem = cf.const_mem.get(id).unwrap();
write_const(&mut file, constant.clone(), mem.clone());
}
write_const(&mut file, constant.clone(), mem.clone());
// write code
let code = cf.mc.as_ref().unwrap().emit();
match file.write_all(code.as_slice()) {
Err(why) => panic!("couldn'd write to file {}: {}", file_path.to_str().unwrap(), why),
Ok(_) => info!("emit code to {}", file_path.to_str().unwrap())
}
}
// Read the file we just wrote above an demangle it
{
let mut demangled_path = path::PathBuf::new();
demangled_path.push(&vm.vm_options.flag_aot_emit_dir);
demangled_path.push(func.name() + ".demangled.S");
// write code
let code = cf.mc.as_ref().unwrap().emit();
match file.write_all(code.as_slice()) {
Err(why) => panic!("couldn'd write to file {}: {}", file_path.to_str().unwrap(), why),
Ok(_) => info!("emit code to {}", file_path.to_str().unwrap())
let mut demangled_file = match File::create(demangled_path.as_path()) {
Err(why) => panic!("couldn't create demangled emission file {}: {}", demangled_path.to_str().unwrap(), why),
Ok(file) => file
};
let mut mangled_file = match File::open(file_path.as_path()) {
Err(why) => panic!("couldn't create demangled emission file {}: {}", demangled_path.to_str().unwrap(), why),
Ok(file) => file
};
let mut f = String::new();
mangled_file.read_to_string(&mut f).unwrap();
let d = demangle_text(f);
match demangled_file.write_all(d.as_bytes()) {
Err(why) => panic!("couldn'd write to file {}: {}", demangled_path.to_str().unwrap(), why),
Ok(_) => info!("emit demangled code to {}", demangled_path.to_str().unwrap())
}
}
}
......@@ -3441,7 +3465,7 @@ fn write_const(f: &mut File, constant: P<Value>, loc: P<Value>) {
Value_::Memory(MemoryLocation::Symbolic{ref label, ..}) => label.clone(),
_ => panic!("expecing a symbolic memory location for constant {}, found {}", constant, loc)
};
f.write_fmt(format_args!("{}:\n", symbol(label))).unwrap();
writeln!(f, "{}:", symbol(mangle_name(label))).unwrap();
write_const_value(f, constant);
}
......@@ -3552,7 +3576,7 @@ pub fn emit_context_with_reloc(vm: &VM,
// merge symbols with relocatable_refs
for (addr, str) in symbols {
relocatable_refs.insert(addr, str);
relocatable_refs.insert(addr, mangle_name(str));
}
for obj_dump in objects.values() {
......@@ -3611,7 +3635,7 @@ pub fn emit_context_with_reloc(vm: &VM,
// write uptr (or other relocatable value) with label
let label = fields.get(&cur_addr).unwrap();
file.write_fmt(format_args!("\t.quad {}\n", symbol(label.clone()))).unwrap();
file.write_fmt(format_args!("\t.quad {}\n", symbol(mangle_name(label.clone())))).unwrap();
} else {
// write plain word (as bytes)
let next_word_addr = cur_addr.plus(POINTER_SIZE);
......
......@@ -4955,7 +4955,6 @@ impl CompilerPass for InstructionSelection {
vm.add_exception_callsite(Callsite::new(callsite.clone(), block_loc, stack_arg_size), self.current_fv_id);
}
let compiled_func = CompiledFunction::new(func.func_id, func.id(), mc,
self.current_constants.clone(), self.current_constants_locs.clone(),
frame, self.current_func_start.take().unwrap(), func_end);
......
......@@ -81,16 +81,16 @@ pub fn emit_mu_types(vm: &VM) {
for ty in ty_guard.values() {
if ty.is_struct() {
file.write_fmt(format_args!("{}", ty)).unwrap();
write!(file, "{}", ty).unwrap();
let struct_ty = struct_map.get(&ty.get_struct_hybrid_tag().unwrap()).unwrap();
file.write_fmt(format_args!(" -> {}\n", struct_ty)).unwrap();
file.write_fmt(format_args!(" {}\n", vm.get_backend_type_info(ty.id()))).unwrap();
writeln!(file, " -> {}", struct_ty).unwrap();
writeln!(file, " {}", vm.get_backend_type_info(ty.id())).unwrap();
} else if ty.is_hybrid() {
file.write_fmt(format_args!("{}", ty)).unwrap();
write!(file, "{}", ty).unwrap();
let hybrid_ty = hybrid_map.get(&ty.get_struct_hybrid_tag().unwrap()).unwrap();
file.write_fmt(format_args!(" -> {}\n", hybrid_ty)).unwrap();
file.write_fmt(format_args!(" {}\n", vm.get_backend_type_info(ty.id()))).unwrap();
writeln!(file, " -> {}", hybrid_ty).unwrap();
writeln!(file, " {}", vm.get_backend_type_info(ty.id())).unwrap();
} else {
// we only care about struct
}
......@@ -118,7 +118,7 @@ fn emit_muir(func: &MuFunctionVersion, vm: &VM) {
Ok(file) => file
};
file.write_fmt(format_args!("{:?}", func)).unwrap();
write!(file, "{:?}", func).unwrap();
}
// original IR (source/input)
......@@ -131,13 +131,13 @@ fn emit_muir(func: &MuFunctionVersion, vm: &VM) {
Ok(file) => file
};
file.write_fmt(format_args!("FuncVer {} of Func #{}\n", func.hdr, func.func_id)).unwrap();
file.write_fmt(format_args!("Signature: {}\n", func.sig)).unwrap();
file.write_fmt(format_args!("IR:\n")).unwrap();
writeln!(file, "FuncVer {} of Func #{}", func.hdr, func.func_id).unwrap();
writeln!(file, "Signature: {}", func.sig).unwrap();
writeln!(file, "IR:").unwrap();
if func.get_orig_ir().is_some() {
file.write_fmt(format_args!("{:?}\n", func.get_orig_ir().as_ref().unwrap())).unwrap();
writeln!(file, "{:?}", func.get_orig_ir().as_ref().unwrap()).unwrap();
} else {
file.write_fmt(format_args!("Empty\n")).unwrap();
writeln!(file, "Empty").unwrap();
}
}
}
......@@ -182,35 +182,35 @@ fn emit_muir_dot_inner(file: &mut File,
use utils::vec_utils;
// digraph func {
file.write_fmt(format_args!("digraph {} {{\n", f_name)).unwrap();
writeln!(file, "digraph {} {{", mangle_name(f_name)).unwrap();
// node shape: rect
file.write("node [shape=rect];\n".as_bytes()).unwrap();
writeln!(file, "node [shape=rect];").unwrap();
// every graph node (basic block)
for (id, block) in f_content.blocks.iter() {
let block_name = block.name();
// BBid [label = "name
file.write_fmt(format_args!("BB{} [label = \"[{}]{} ", *id, *id, &block_name)).unwrap();
write!(file, "BB{} [label = \"[{}]{} ", *id, *id, &block_name).unwrap();
let block_content = block.content.as_ref().unwrap();
// (args)
file.write_fmt(format_args!("{}", vec_utils::as_str(&block_content.args))).unwrap();
write!(file, "{}", vec_utils::as_str(&block_content.args)).unwrap();
if block_content.exn_arg.is_some() {
// [exc_arg]
file.write_fmt(format_args!("[{}]", block_content.exn_arg.as_ref().unwrap())).unwrap();
write!(file, "[{}]", block_content.exn_arg.as_ref().unwrap()).unwrap();
}
// :\n\n
file.write(":\\l\\l".as_bytes()).unwrap();
write!(file, ":\\l\\l").unwrap();
// all the instructions
for inst in block_content.body.iter() {
file.write_fmt(format_args!("{}\\l", inst)).unwrap();
write!(file, "{}\\l", inst).unwrap();
}
// "];
file.write("\"];\n".as_bytes()).unwrap();
writeln!(file, "\"];").unwrap();
}
// every edge
......@@ -228,28 +228,28 @@ fn emit_muir_dot_inner(file: &mut File,
match inst.v {
Branch1(ref dest) => {