
Commit 77733b5f authored by Isaac Oscar Gariano

More formatting fixes (and fixed machine code graph for aarch64)

parent 28f4ed79
@@ -1118,14 +1118,26 @@ impl fmt::Display for Value {
         if DISPLAY_TYPE {
             match self.v {
                 Value_::SSAVar(_) => write!(f, "<{}>{}", self.ty, self.hdr),
-                Value_::Constant(ref c) => write!(f, "<{}>{}", self.ty, c),
+                Value_::Constant(ref c) => {
+                    if self.is_func_const() {
+                        write!(f, "{}", c)
+                    } else {
+                        write!(f, "<{}>{}", self.ty, c)
+                    }
+                }
                 Value_::Global(ref ty) => write!(f, "<{}>@{}", ty, self.hdr),
                 Value_::Memory(ref mem) => write!(f, "<{}>{}{}", self.ty, self.hdr, mem)
             }
         } else {
             match self.v {
                 Value_::SSAVar(_) => write!(f, "{}", self.hdr),
-                Value_::Constant(ref c) => write!(f, "<{}>{}", self.ty, c),
+                Value_::Constant(ref c) => {
+                    if self.is_func_const() {
+                        write!(f, "{}", c)
+                    } else {
+                        write!(f, "<{}>{}", self.ty, c)
+                    }
+                }
                 Value_::Global(_) => write!(f, "@{}", self.hdr),
                 Value_::Memory(ref mem) => write!(f, "{}{}", self.hdr, mem)
             }
@@ -1261,7 +1273,7 @@ impl fmt::Display for Constant {
             &Constant::Float(v) => write!(f, "{}", v),
             &Constant::Double(v) => write!(f, "{}", v),
             // &Constant::IRef(v) => write!(f, "{}", v),
-            &Constant::FuncRef(ref v) => write!(f, "FuncRef {}", v.name),
+            &Constant::FuncRef(ref v) => write!(f, "{}", v.name),
             &Constant::Vector(ref v) => {
                 // TODO: Make this Muc compatible?
                 write!(f, "[").unwrap();
......
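These two hunks change how function-reference constants are printed: a `FuncRef` constant now shows up as just the function's name, without the `<type>` prefix or the literal `FuncRef ` tag, keeping the textual IR closer to Mu syntax. A minimal, self-contained sketch of the same Display pattern (the `Val`/`Const` types, field names, and output format below are illustrative stand-ins, not Zebu's real `Value`/`Constant`):

```rust
use std::fmt;

// Hypothetical stand-ins for illustration only.
enum Const {
    Int(u64),
    FuncRef(String),
}

struct Val {
    ty: String,
    c: Const,
}

impl Val {
    fn is_func_const(&self) -> bool {
        matches!(self.c, Const::FuncRef(_))
    }
}

impl fmt::Display for Const {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        match *self {
            Const::Int(v) => write!(f, "{}", v),
            // After the change: just the target name, no "FuncRef " tag.
            Const::FuncRef(ref name) => write!(f, "{}", name),
        }
    }
}

impl fmt::Display for Val {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        if self.is_func_const() {
            write!(f, "{}", self.c) // no <type> prefix for function references
        } else {
            write!(f, "<{}>{}", self.ty, self.c)
        }
    }
}

fn main() {
    let n = Val { ty: "int<64>".into(), c: Const::Int(5) };
    let fr = Val { ty: "funcref".into(), c: Const::FuncRef("my_func".into()) };
    println!("{}", n);  // <int<64>>5
    println!("{}", fr); // my_func
}
```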
@@ -713,13 +713,13 @@ impl MachineCode for ASMCode {
         }
     }
-    fn replace_branch_dest(&mut self, inst: usize, new_dest: &str, succ: usize) {
+    fn replace_branch_dest(&mut self, inst: usize, old_succ: usize, new_dest: &str, succ: usize) {
         {
             let asm = &mut self.code[inst];
             let inst = String::from(asm.code.split_whitespace().next().unwrap());
             asm.code = format!("{} {}", inst, mangle_name(Arc::new(new_dest.to_string())));
-            asm.succs.clear();
+            asm.succs.retain(|&x| x != old_succ);
             asm.succs.push(succ);
         }
         {
@@ -732,10 +732,7 @@ impl MachineCode for ASMCode {
     }
     fn set_inst_nop(&mut self, index: usize) {
-        let ref mut inst = self.code[index];
-        inst.code.clear();
-        inst.defines.clear();
-        inst.uses.clear();
+        self.code[index].code.clear();
     }
     fn remove_unnecessary_callee_saved(&mut self, used_callee_saved: Vec<MuID>) -> HashSet<MuID> {
......
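Two related fixes to the aarch64 `ASMCode` backend: `replace_branch_dest` now receives the old successor and removes only that edge (`retain`) instead of wiping the whole successor list, and `set_inst_nop` no longer clears the instruction's `defines`/`uses` sets. The `retain` change is presumably what the commit title means by fixing the machine-code graph: clearing every recorded successor edge and pushing a single new one corrupts the control-flow graph whenever the instruction had edges that were not being rewritten. A tiny illustration of the difference, using a plain `Vec<usize>` as a stand-in for the successor list:

```rust
/// Hypothetical successor list of a machine instruction (indices of successor instructions).
fn retarget(succs: &mut Vec<usize>, old_succ: usize, new_succ: usize) {
    // Keep every edge except the one being rewritten...
    succs.retain(|&s| s != old_succ);
    // ...then add the new destination.
    succs.push(new_succ);
}

fn main() {
    // An instruction with a fall-through successor (20) and a branch-taken successor (35).
    let mut succs = vec![20, 35];
    retarget(&mut succs, 35, 42);
    assert_eq!(succs, vec![20, 42]);
    // succs.clear() followed by push(42) would have silently dropped edge 20.
    println!("{:?}", succs);
}
```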
@@ -178,8 +178,11 @@ impl<'a> InstructionSelection {
                     self.backend.emit_b(branch_target);
                 } else {
                     self.backend.emit_b_cond(cond[0], branch_target.clone());
                     if cond.len() == 2 {
+                        self.finish_block();
+                        self.start_block(
+                            make_block_name(&node.name(), "second_condition")
+                        );
                         self.backend.emit_b_cond(cond[1], branch_target);
                     }
                 }
@@ -188,6 +191,7 @@ impl<'a> InstructionSelection {
                     let cond_reg = self.emit_ireg(cond, f_content, f_context, vm);
                     self.backend.emit_tbnz(&cond_reg, 0, branch_target.clone());
                 };
+                self.finish_block();
             }
             Instruction_::Select {
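Most of the remaining aarch64 instruction-selection hunks add `finish_block()` (and, where more code follows, a matching `start_block(...)`) after emitted branches. The backend appears to group emitted instructions into named machine blocks, and a branch has to terminate the current block so the generated code's control-flow graph gets the right successor edges; emitting a second conditional branch, or anything else, into the same block would mis-describe where control can leave it. A rough sketch of that bookkeeping with a hypothetical `CodeBuilder` (not Zebu's actual backend API):

```rust
// Minimal sketch of the "close the block at every branch" rule.
#[derive(Default)]
struct CodeBuilder {
    blocks: Vec<(String, Vec<String>)>, // (block name, instructions)
    current: Option<(String, Vec<String>)>,
}

impl CodeBuilder {
    fn start_block(&mut self, name: &str) {
        self.current = Some((name.to_string(), Vec::new()));
    }
    fn emit(&mut self, inst: &str) {
        self.current.as_mut().expect("no open block").1.push(inst.to_string());
    }
    fn finish_block(&mut self) {
        // A branch ends a machine basic block; anything emitted after it
        // must belong to a new block or the successor edges are wrong.
        if let Some(b) = self.current.take() {
            self.blocks.push(b);
        }
    }
}

fn main() {
    let mut b = CodeBuilder::default();
    b.start_block("entry");
    b.emit("b.eq target");   // first condition
    b.finish_block();        // control may leave here, so close the block
    b.start_block("entry:second_condition");
    b.emit("b.vs target");   // the second condition gets its own block
    b.finish_block();
    for (name, insts) in &b.blocks {
        println!("{}: {:?}", name, insts);
    }
}
```

The `second_condition` block added in the hunk above plays the same role as the second block in this sketch.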
@@ -316,6 +320,7 @@ impl<'a> InstructionSelection {
                 trace!("emit branch1");
                 // jmp
                 self.backend.emit_b(target);
+                self.finish_block();
             }
             Instruction_::Switch {
@@ -377,6 +382,7 @@ impl<'a> InstructionSelection {
                     let default_target = f_content.get_block(default.target.id()).name();
                     self.backend.emit_b(default_target);
+                    self.finish_block();
                 } else {
                     panic!("expecting cond in switch to be ireg: {}", cond);
                 }
@@ -1015,7 +1021,6 @@ impl<'a> InstructionSelection {
                 // load_start:
                 self.start_block(blk_load_start.clone());
                 // Load the value:
                 if use_acquire {
                     self.backend.emit_ldaxp(&res_l, &res_h, &temp_loc);
@@ -1034,7 +1039,11 @@ impl<'a> InstructionSelection {
                 }
                 // If the store failed, then branch back to 'load_start:'
-                self.backend.emit_cbnz(&success, blk_load_start.clone())
+                self.backend.emit_cbnz(&success, blk_load_start.clone());
+                self.finish_block();
+                self.start_block(
+                    make_block_name(&node.name(), "load_finished")
+                );
             }
         }
     } else {
@@ -1179,7 +1188,11 @@ impl<'a> InstructionSelection {
                 }
                 // If the store failed, then branch back to 'store_start:'
-                self.backend.emit_cbnz(&success, blk_store_start.clone())
+                self.backend.emit_cbnz(&success, blk_store_start.clone());
+                self.finish_block();
+                self.start_block(
+                    make_block_name(&node.name(), "store_finished")
+                )
             }
         }
     } else {
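For context, the `cbnz` in these two hunks closes an exclusive-access retry loop: on aarch64 the store-exclusive (`stxp`/`stlxp`) writes a status register (called `success` here) that is zero only if the store kept its reservation, so `cbnz` branches back to the `load_start:`/`store_start:` label on failure. Besides adding the missing semicolons, the change gives the code after the loop its own `load_finished`/`store_finished` block, so the back edge is represented correctly. The retry shape is the same as an ordinary compare-and-swap loop; a rough stand-alone Rust analogue (not Zebu code, and the block-label comments are only an analogy):

```rust
use std::sync::atomic::{AtomicU64, Ordering};

// Keep retrying until the exclusive store succeeds, then fall through
// to the code that would live in the "*_finished" block.
fn atomic_add(cell: &AtomicU64, delta: u64) -> u64 {
    loop {
        // load_start: load the current value (ldaxr/ldaxp in the emitted asm)
        let old = cell.load(Ordering::Acquire);
        // store_start: try to publish the new value (stlxr/stlxp);
        // compare_exchange_weak may fail spuriously, exactly like an
        // exclusive store losing its reservation.
        match cell.compare_exchange_weak(old, old + delta, Ordering::AcqRel, Ordering::Acquire) {
            Ok(_) => return old, // status == 0: fall through to *_finished
            Err(_) => continue,  // cbnz status, *_start: retry
        }
    }
}

fn main() {
    let cell = AtomicU64::new(40);
    let old = atomic_add(&cell, 2);
    println!("{} -> {}", old, cell.load(Ordering::Relaxed)); // 40 -> 42
}
```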
@@ -1312,7 +1325,8 @@ impl<'a> InstructionSelection {
                     self.backend.emit_fcmp(&res_value, &expected);
                 }
                 self.backend.emit_b_cond("NE", blk_cmpxchg_failed.clone());
+                self.finish_block();
+                self.start_block(make_block_name(&node.name(), "cmpxchg_store"));
                 if use_release {
                     match desired.ty.v {
                         // Have to store a temporary GPR
@@ -1813,8 +1827,8 @@ impl<'a> InstructionSelection {
                     CALLER_SAVED_REGS.to_vec(),
                     true
                 );
                 self.record_callsite(None, callsite.unwrap(), 0);
+                self.finish_block();
             }
             // Runtime Entry
@@ -4672,13 +4686,18 @@ impl<'a> InstructionSelection {
                 self.record_callsite(resumption, callsite.unwrap(), res_stack_size);
+                if resumption.is_some() {
+                    self.finish_block();
+                    let block_name = make_block_name(&node.name(), "stack_resumption");
+                    self.start_block(block_name);
+                }
                 self.emit_unload_arguments(inst.value.as_ref().unwrap(), res_locs, f_context, vm);
                 emit_add_u64(self.backend.as_mut(), &SP, &SP, res_stack_size as u64);
                 if resumption.is_some() {
                     self.backend
                         .emit_b(resumption.as_ref().unwrap().normal_dest.target.name());
+                    self.finish_block();
                 }
             }
         }
......
@@ -826,7 +826,7 @@ impl MachineCode for ASMCode {
     }
     /// replace destination for a jump instruction
-    fn replace_branch_dest(&mut self, inst: usize, new_dest: &str, succ: MuID) {
+    fn replace_branch_dest(&mut self, inst: usize, old_succ: usize, new_dest: &str, succ: MuID) {
         {
             let asm = &mut self.code[inst];
@@ -834,7 +834,7 @@ impl MachineCode for ASMCode {
                 "jmp {}",
                 symbol(&mangle_name(Arc::new(new_dest.to_string())))
             );
-            asm.succs.clear();
+            asm.succs.retain(|&x| x != old_succ);
             asm.succs.push(succ);
         }
         {
......
@@ -125,7 +125,7 @@ pub fn emit_mu_types(suffix: &str, vm: &VM) {
             writeln!(file, "{}", struct_ty).unwrap();
             writeln!(file, "\n\t/*{}*/", vm.get_backend_type_info(ty.id())).unwrap();
         } else if ty.is_hybrid() {
-            write!(file, "{}", ty).unwrap();
+            write!(file, ".typedef {} = ", ty.hdr).unwrap();
             let hybrid_ty = hybrid_map
                 .get(&ty.get_struct_hybrid_tag().unwrap())
                 .unwrap();
@@ -162,7 +162,7 @@ pub fn emit_mu_globals(suffix: &str, vm: &VM) {
     let global_guard = vm.globals().read().unwrap();
     for g in global_guard.values() {
-        write!(
+        writeln!(
             file,
             ".global {}<{}>",
             g.name(),
......
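The `write!` → `writeln!` switch in `emit_mu_globals` simply restores the newline after each `.global` declaration so successive globals don't run together on one line, while `emit_mu_types` now writes the `.typedef {} = ` prefix explicitly from the type's header instead of relying on the type's `Display` output (which, per the test change at the end of this commit, now prints just the hybrid tag). The `write!`/`writeln!` difference is standard-library behaviour, shown here on a `String` writer:

```rust
use std::fmt::Write;

fn main() {
    // write! emits no trailing newline; two calls run together on one line.
    let mut out = String::new();
    write!(out, ".global {}<{}>", "@g1", "int<64>").unwrap();
    write!(out, ".global {}<{}>", "@g2", "int<64>").unwrap();
    assert_eq!(out, ".global @g1<int<64>>.global @g2<int<64>>");

    // writeln! appends '\n', giving one declaration per line.
    let mut out = String::new();
    writeln!(out, ".global {}<{}>", "@g1", "int<64>").unwrap();
    writeln!(out, ".global {}<{}>", "@g2", "int<64>").unwrap();
    assert_eq!(out, ".global @g1<int<64>>\n.global @g2<int<64>>\n");
    print!("{}", out);
}
```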
@@ -163,7 +163,7 @@ impl PeepholeOptimization {
         // the instruction that we may rewrite
         let orig_inst = inst;
         // the destination we will rewrite the instruction to branch to
-        let final_dest: Option<MuName> = {
+        let dests: Option<(MuName, MuName)> = {
             use std::collections::HashSet;
             let mut cur_inst = inst;
@@ -216,7 +216,7 @@ impl PeepholeOptimization {
                     Some(ref dest2) => {
                         // its a jump-to-jump case
                         cur_inst = first_inst;
-                        last_dest = Some(dest2.clone());
+                        last_dest = Some((dest.clone(), dest2.clone()));
                     }
                     None => break
                 }
@@ -227,17 +227,18 @@ impl PeepholeOptimization {
             last_dest
         };
-        if let Some(dest) = final_dest {
-            let first_inst = mc.get_block_range(&dest).unwrap().start;
+        if let Some((old_dest, final_dest)) = dests {
+            let first_inst = mc.get_block_range(&final_dest).unwrap().start;
+            let old_first_inst = mc.get_block_range(&old_dest).unwrap().start;
             info!(
                 "inst {} chain jumps to {}, rewrite as branching to {} (successor: {})",
                 orig_inst,
-                dest,
-                dest,
+                final_dest,
+                final_dest,
                 first_inst
             );
-            mc.replace_branch_dest(inst, &dest, first_inst);
+            mc.replace_branch_dest(inst, old_first_inst, &final_dest, first_inst);
         }
     }
}
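These three hunks adjust the jump-to-jump peephole: the chain-following loop now remembers both the branch's immediate destination (`old_dest`) and the final destination it should be rewritten to, so `replace_branch_dest` can drop exactly the edge to the bypassed block instead of clearing every successor. A simplified, self-contained sketch of the chain-following idea (the `HashMap` of jump-only blocks below is a hypothetical stand-in for Zebu's `MachineCode` queries):

```rust
use std::collections::{HashMap, HashSet};

/// Follow a chain of trivial jump-only blocks and return the block the
/// branch should finally target, guarding against cycles of jumps.
fn final_target(start: &str, jump_only: &HashMap<String, String>) -> String {
    let mut seen = HashSet::new();
    let mut cur = start.to_string();
    while let Some(next) = jump_only.get(&cur) {
        // A cycle of jumps (a -> b, b -> a) must not loop forever.
        if !seen.insert(cur.clone()) {
            break;
        }
        cur = next.clone();
    }
    cur
}

fn main() {
    // block -> destination, for blocks that contain nothing but one jump
    let mut jump_only = HashMap::new();
    jump_only.insert("b".to_string(), "c".to_string());
    jump_only.insert("c".to_string(), "d".to_string());

    // A branch currently aimed at "b" can be rewritten to branch to "d";
    // the edge to "b" (the old successor) is dropped and an edge to "d" added.
    assert_eq!(final_target("b", &jump_only), "d");
    println!("rewrite: b -> {}", final_target("b", &jump_only));
}
```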
@@ -229,7 +229,7 @@ pub trait MachineCode {
    /// replace a temp that is used in the inst with another temp
    fn replace_use_tmp_for_inst(&mut self, from: MuID, to: MuID, inst: usize);
    /// replace destination for an unconditional branch instruction
-   fn replace_branch_dest(&mut self, inst: usize, new_dest: &str, succ: usize);
+   fn replace_branch_dest(&mut self, inst: usize, old_succ: usize, new_dest: &str, succ: usize);
    /// set an instruction as nop
    fn set_inst_nop(&mut self, index: usize);
    /// remove unnecessary push/pop if the callee saved register is not used
......
@@ -121,7 +121,7 @@ fn test_type_constructors() {
         assert_type!(t7_struct_ty, "struct<int<8> float>");
     }
     assert_type!(*types[8], "array<int<8> 5>");
-    assert_type!(*types[9], "MyHybridTag1(hybrid)");
+    assert_type!(*types[9], "MyHybridTag1");
     assert_type!(*types[10], "void");
     assert_type!(*types[11], "threadref");
     assert_type!(*types[12], "stackref");
......