GitLab will be upgraded to version 12.10.14-ce.0 on 28 Sept 2020, from 2.00pm (AEDT) to 2.30pm (AEDT). During the update, GitLab and Mattermost services will not be available. If you have any concerns about this, please talk to us at N110 (b) CSIT building.

Commit 90d2b8a3 authored by Isaac Oscar Gariano

Updated name handling (with pretty printing!)

parent 85da4191
......@@ -995,7 +995,7 @@ impl fmt::Display for Constant {
}
#[cfg(target_arch = "x86_64")]
rodal_enum!(MemoryLocation{{Address: scale, base, offset, index}, {Symbolic: is_global, base, label}});
rodal_enum!(MemoryLocation{{Address: scale, base, offset, index}, {Symbolic: is_global, base, label, is_native}});
#[cfg(target_arch = "x86_64")]
#[derive(Debug, Clone, PartialEq)]
pub enum MemoryLocation {
......@@ -1008,7 +1008,8 @@ pub enum MemoryLocation {
Symbolic{
base: Option<P<Value>>,
label: MuName,
is_global: bool
is_global: bool,
is_native: bool,
}
}
......@@ -1041,7 +1042,7 @@ impl fmt::Display for MemoryLocation {
}
#[cfg(target_arch = "aarch64")]
rodal_enum!(MemoryLocation{{VirtualAddress: signed, base, offset, scale}, {Address: base, offset, shift, signed}, {Symbolic: label, is_global}});
rodal_enum!(MemoryLocation{{VirtualAddress: signed, base, offset, scale}, {Address: base, offset, shift, signed}, {Symbolic: label, is_global, is_native}});
#[cfg(target_arch = "aarch64")]
#[derive(Debug, Clone, PartialEq)]
pub enum MemoryLocation {
......@@ -1064,7 +1065,8 @@ pub enum MemoryLocation {
},
Symbolic{
label: MuName,
is_global: bool
is_global: bool,
is_native: bool,
}
}
......@@ -1172,6 +1174,57 @@ pub fn demangle_name(name: MuName) -> MuName {
name
}
/// Scans `text` for mangled Mu names (maximal `[A-Za-z0-9_]+` tokens that
/// begin with `"__mu_"` at a word boundary) and replaces each one with its
/// demangled form via `demangle_name`. All other text is passed through
/// unchanged.
///
/// The original byte-indexed implementation cast raw UTF-8 bytes to `char`
/// (misclassifying non-ASCII bytes via Latin-1 semantics) and could slice
/// mid-codepoint, panicking in `from_utf8(..).unwrap()`. Since mangled names
/// are pure ASCII, we instead scan the `&str` directly and only ever split at
/// ASCII character positions, which are always valid UTF-8 boundaries.
pub fn demangle_text(text: String) -> String {
    // A "word" character for the purpose of delimiting mangled names.
    // Mangled names are ASCII-only, so ASCII classification suffices.
    fn is_word_char(c: char) -> bool {
        c.is_ascii_alphanumeric() || c == '_'
    }

    let mut output = String::with_capacity(text.len());
    let mut rest = text.as_str();

    while let Some(pos) = rest.find("__mu_") {
        // Only demangle when "__mu_" starts a word: if the preceding char is
        // itself a word char, this occurrence is embedded inside some other
        // identifier and must be copied through verbatim.
        let at_word_start = rest[..pos]
            .chars()
            .next_back()
            .map_or(true, |c| !is_word_char(c));

        // Extend to the end of the alphanumeric/underscore token.
        let end = rest[pos..]
            .find(|c: char| !is_word_char(c))
            .map_or(rest.len(), |off| pos + off);

        if at_word_start {
            output.push_str(&rest[..pos]); // text before the mangled name
            output.push_str(demangle_name(rest[pos..end].to_string()).as_str());
        } else {
            output.push_str(&rest[..end]); // not a name start: copy as-is
        }
        rest = &rest[end..];
    }

    // Append whatever trails the last (if any) mangled name.
    output + rest
}
impl MuEntityHeader {
pub fn unnamed(id: MuID) -> MuEntityHeader {
......
......@@ -14,8 +14,6 @@
#[macro_use]
extern crate rodal;
#[macro_use]
extern crate log;
extern crate simple_logger;
#[macro_use]
extern crate lazy_static;
......
......@@ -756,7 +756,7 @@ impl MachineCode for ASMCode {
fn trace_inst(&self, i: usize) {
trace!("#{}\t{:30}\t\tdefine: {:?}\tuses: {:?}\tpred: {:?}\tsucc: {:?}",
i, self.code[i].code, self.get_inst_reg_defines(i), self.get_inst_reg_uses(i),
i, demangle_text(self.code[i].code), self.get_inst_reg_defines(i), self.get_inst_reg_uses(i),
self.code[i].preds, self.code[i].succs);
}
......
......@@ -2449,8 +2449,9 @@ fn make_value_symbolic(label: MuName, global: bool, ty: &P<MuType>, vm: &VM) ->
hdr: MuEntityHeader::unnamed(vm.next_id()),
ty : ty.clone(),
v : Value_::Memory(MemoryLocation::Symbolic {
label: mangle_name(label)),
is_global: global
label: label,
is_global: global,
is_native: false,
})
})
}
......
......@@ -771,11 +771,11 @@ impl MachineCode for ASMCode {
}
fn trace_inst(&self, i: usize) {
trace!("#{}\t{:30}\t\tdefine: {:?}\tuses: {:?}\tpred: {:?}\tsucc: {:?}",
i, self.code[i].code, self.get_inst_reg_defines(i), self.get_inst_reg_uses(i),
trace!("#{}\t{:60}\t\tdefine: {:?}\tuses: {:?}\tpred: {:?}\tsucc: {:?}",
i, demangle_text(self.code[i].code.clone()), self.get_inst_reg_defines(i), self.get_inst_reg_uses(i),
self.code[i].preds, self.code[i].succs);
}
fn get_ir_block_livein(&self, block: &str) -> Option<&Vec<MuID>> {
match self.blocks.get(block) {
Some(ref block) => Some(&block.livein),
......@@ -1117,7 +1117,7 @@ impl ASMCodeGen {
spill_info: Option<SpillMemInfo>)
{
let line = self.line();
trace!("asm: {}", code);
trace!("asm: {}", demangle_text(code.clone()));
trace!(" defines: {:?}", defines);
trace!(" uses: {:?}", uses);
let mc = self.cur_mut();
......@@ -1259,7 +1259,8 @@ impl ASMCodeGen {
result_str.push(')');
loc_cursor += 1;
},
Value_::Memory(MemoryLocation::Symbolic{ref base, ref label, is_global}) => {
Value_::Memory(MemoryLocation::Symbolic{ref base, ref label, is_global, is_native}) => {
let label = if is_native { "/*C*/".to_string() + label.as_str() } else { mangle_name(label.clone()) };
if base.is_some() && base.as_ref().unwrap().id() == x86_64::RIP.id() && is_global {
// pc relative address
let pic_symbol = pic_symbol(label.clone());
......@@ -2995,37 +2996,35 @@ impl CodeGenerator for ASMCodeGen {
self.add_asm_branch2(asm, dest_name);
}
#[cfg(target_os = "macos")]
fn emit_call_near_rel32(&mut self, callsite: String, func: MuName, pe: Option<MuName>) -> ValueLocation {
trace!("emit: call {}", func);
fn emit_call_near_rel32(&mut self, callsite: String, func: MuName, pe: Option<MuName>, is_native: bool) -> ValueLocation {
if is_native {
trace!("emit: call {}", func);
} else {
trace!("emit: ccall {}", func);
}
let callsite = mangle_name(callsite);
let asm = format!("call {}", symbol(func));
self.add_asm_call(asm, pe);
let callsite_symbol = symbol(callsite.clone());
self.add_asm_symbolic(directive_globl(callsite_symbol.clone()));
self.add_asm_symbolic(format!("{}:", callsite_symbol.clone()));
ValueLocation::Relocatable(RegGroup::GPR, callsite)
}
let func = if is_native {
"/*C*/".to_string() + func.as_str()
} else {
mangle_name(func)
};
#[cfg(target_os = "linux")]
// generating Position-Independent Code using PLT
fn emit_call_near_rel32(&mut self, callsite: String, func: MuName, pe: Option<MuName>) -> ValueLocation {
trace!("emit: call {}", func);
let asm = if cfg!(target_os = "macos") {
format!("call {}", symbol(func))
} else {
format!("call {}@PLT", symbol(func))
};
let callsite = mangle_name(callsite);
let asm = format!("call {}@PLT", symbol(func));
self.add_asm_call(asm, pe);
let callsite_symbol = symbol(callsite.clone());
self.add_asm_symbolic(directive_globl(callsite_symbol.clone()));
self.add_asm_symbolic(format!("{}:", callsite_symbol.clone()));
ValueLocation::Relocatable(RegGroup::GPR, callsite)
}
fn emit_call_near_r64(&mut self, callsite: String, func: &P<Value>, pe: Option<MuName>) -> ValueLocation {
trace!("emit: call {}", func);
let callsite = mangle_name(callsite);
......
......@@ -213,7 +213,7 @@ pub trait CodeGenerator {
fn emit_js(&mut self, dest: MuName);
fn emit_call_near_rel32(&mut self, callsite: String, func: MuName, pe: Option<MuName>) -> ValueLocation;
fn emit_call_near_rel32(&mut self, callsite: String, func: MuName, pe: Option<MuName>, is_native: bool) -> ValueLocation;
fn emit_call_near_r64 (&mut self, callsite: String, func: &P<Value>, pe: Option<MuName>) -> ValueLocation;
fn emit_call_near_mem64(&mut self, callsite: String, func: &P<Value>, pe: Option<MuName>) -> ValueLocation;
......
......@@ -3187,7 +3187,7 @@ impl <'a> InstructionSelection {
unimplemented!()
} else {
let callsite = self.new_callsite_label(cur_node);
self.backend.emit_call_near_rel32(callsite.clone(), func_name, None); // assume ccall wont throw exception
self.backend.emit_call_near_rel32(callsite.clone(), func_name, None, true); // assume ccall wont throw exception
// TODO: What if theres an exception block?
self.current_callsites.push_back((callsite, 0, stack_arg_size));
......@@ -3350,7 +3350,7 @@ impl <'a> InstructionSelection {
unimplemented!()
} else {
let callsite = self.new_callsite_label(Some(cur_node));
self.backend.emit_call_near_rel32(callsite, mangle_name(target.name()), potentially_excepting)
self.backend.emit_call_near_rel32(callsite, target.name(), potentially_excepting, false)
}
} else if self.match_ireg(func) {
let target = self.emit_ireg(func, f_content, f_context, vm);
......@@ -3372,14 +3372,14 @@ impl <'a> InstructionSelection {
let ref exn_dest = resumption.as_ref().unwrap().exn_dest;
let target_block = exn_dest.target;
self.current_callsites.push_back((demangle_name(callsite.to_relocatable()), target_block, stack_arg_size));
self.current_callsites.push_back((callsite.to_relocatable(), target_block, stack_arg_size));
// insert an intermediate block to branch to normal
// the branch is inserted later (because we need to deal with postcall convention)
self.finish_block();
self.start_block(make_block_name(cur_node, "normal_cont_for_call"));
} else {
self.current_callsites.push_back((demangle_name(callsite.to_relocatable()), 0, stack_arg_size));
self.current_callsites.push_back((callsite.to_relocatable(), 0, stack_arg_size));
}
// deal with ret vals, collapse stack etc.
......@@ -4126,8 +4126,9 @@ impl <'a> InstructionSelection {
ty: types::get_referent_ty(&pv.ty).unwrap(),
v: Value_::Memory(MemoryLocation::Symbolic {
base: Some(x86_64::RIP.clone()),
label: mangle_name(pv.name()),
label: pv.name(),
is_global: true,
is_native: false,
})
})
} else if cfg!(target_os = "linux") {
......@@ -4139,8 +4140,9 @@ impl <'a> InstructionSelection {
ty: pv.ty.clone(),
v: Value_::Memory(MemoryLocation::Symbolic {
base: Some(x86_64::RIP.clone()),
label: mangle_name(pv.name()),
is_global: true
label: pv.name(),
is_global: true,
is_native: false,
})
});
......@@ -4758,8 +4760,9 @@ impl <'a> InstructionSelection {
ty : ADDRESS_TYPE.clone(),
v : Value_::Memory(MemoryLocation::Symbolic {
base: Some(x86_64::RIP.clone()),
label: mangle_name(name.clone()),
is_global: false
label: name.clone(),
is_global: false,
is_native: false,
})
})
}
......@@ -4782,8 +4785,9 @@ impl <'a> InstructionSelection {
ty : ADDRESS_TYPE.clone(),
v : Value_::Memory(MemoryLocation::Symbolic {
base: Some(x86_64::RIP.clone()),
label: mangle_name(func_name),
is_global: true
label: func_name,
is_global: true,
is_native: false,
})
})
}
......@@ -4876,7 +4880,7 @@ impl CompilerPass for InstructionSelection {
// we need to be aware of exception blocks so that we can emit information to catch exceptions
let loc = self.backend.start_exception_block(block_label.clone());
self.current_exn_blocks.insert(block.id(), demangle_name(loc.to_relocatable()));
self.current_exn_blocks.insert(block.id(), loc.to_relocatable());
} else {
// normal block
self.backend.start_block(block_label.clone());
......
......@@ -334,7 +334,7 @@ fn copy_inline_blocks(caller: &mut Vec<Block>, ret_block: MuID, callee: &Functio
let old_id = block.id();
let new_id = *block_map.get(&block.id()).unwrap();
let mut block = Block {
hdr: MuEntityHeader::named(new_id, format!("{}:inlinedblock.#{}", new_id, block.name())),
hdr: MuEntityHeader::named(new_id, format!("{}:inlinedblock.#{}", block.name(), new_id)),
content: block.content.clone(),
control_flow: ControlFlow::default()
};
......
......@@ -81,8 +81,8 @@ rodal_enum!(ValueLocation{(Register: group, id), (Constant: group, word), (Reloc
pub enum ValueLocation {
Register(RegGroup, MuID),
Constant(RegGroup, Word),
Relocatable(RegGroup, MuName),
Relocatable(RegGroup, MuName), // TODO: This only works for mu entities (add a flag to indicate if its native or have a different variant?)
Direct(RegGroup, Address), // Not dumped
Indirect(RegGroup, Address), // Not dumped
}
......
......@@ -109,8 +109,7 @@ fn link_dylib_internal (files: Vec<PathBuf>, lib: &Vec<String>, libpath: &Vec<St
/// Returns the path of the emitted assembly file for the Mu function named
/// `f`: `<aot_emit_dir>/<f>.s`.
fn get_path_for_mu_func (f: MuName, vm: &VM) -> PathBuf {
    let mut ret = PathBuf::from(&vm.vm_options.flag_aot_emit_dir);
    // Append ".s" by string concatenation rather than Path::set_extension:
    // function names may themselves contain '.' characters, and
    // set_extension would clobber everything after the last dot.
    ret.push(f + ".s");
    ret
}
......
......@@ -738,7 +738,7 @@ impl <'a> VM {
if self.is_running() {
unimplemented!()
} else {
ValueLocation::Relocatable(backend::RegGroup::GPR, mangle_name(func.name()))
ValueLocation::Relocatable(backend::RegGroup::GPR, func.name())
}
}
......@@ -859,7 +859,7 @@ impl <'a> VM {
{
let mut pending_funcref = self.aot_pending_funcref_store.write().unwrap();
for (addr, vl) in pending_funcref.drain() {
ret.insert(addr, demangle_name(vl.to_relocatable()));
ret.insert(addr, vl.to_relocatable());
}
}
......@@ -1164,7 +1164,7 @@ impl <'a> VM {
let symbol = self.name_of(func_id);
let mut pending_funcref_guard = self.aot_pending_funcref_store.write().unwrap();
pending_funcref_guard.insert(addr, ValueLocation::Relocatable(backend::RegGroup::GPR, mangle_name(symbol)));
pending_funcref_guard.insert(addr, ValueLocation::Relocatable(backend::RegGroup::GPR, symbol));
}
// this function and the following two make assumption that GC will not move object
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment