Commit 6e2649fe authored by qinsoon

Merge branch 'develop' into issue82-inf-loop-in-peephole

parents 6007d267 6d862159
......@@ -42,6 +42,28 @@ test:cargo:runtime:
  script:
    - RUST_BACKTRACE=1 RUST_TEST_THREADS=1 ./test-release --color=always test_runtime 2> /dev/null
.build_muc: &build_muc |
  if [ -d "tests/test_muc/mu-tool-compiler" ]; then rm -Rf tests/test_muc/mu-tool-compiler; fi
  cd tests/test_muc
  git clone https://gitlab.anu.edu.au/mu/mu-tool-compiler
  cd mu-tool-compiler
  mkdir lib
  ln -s $MU_ZEBU/target/release/libmu.so lib/
  make
  cd ..
testmuc:test_simple:
  stage: test
  script:
    - *build_muc
    - LD_LIBRARY_PATH=mu-tool-compiler/lib/ MUC=mu-tool-compiler/muc python2 -m pytest test_simple.py -v
testmuc:test_swapstack:
  stage: test
  script:
    - *build_muc
    - LD_LIBRARY_PATH=mu-tool-compiler/lib/ MUC=mu-tool-compiler/muc python2 -m pytest test_thread_and_stack.py -v
testjit:milestones:
  stage: test
  script:
......@@ -60,6 +82,8 @@ testjit:cmpops:
testjit:controlflow:
  stage: test
  script:
    # run this test under test_jit directory
    # as a C source file is expected in a relative path to current working directory
    - cd tests/test_jit
    - RUST_BACKTRACE=1 pytest test_controlflow.py -v --color=yes
......
......@@ -22,6 +22,8 @@ extern crate gcc;
#[cfg(target_arch = "x86_64")]
fn main() {
gcc::Build::new()
.flag("-O3")
.flag("-c")
.file("src/runtime/runtime_c_x64_sysv.c")
.compile("libruntime_c.a");
......@@ -39,6 +41,8 @@ fn main() {
#[cfg(target_arch = "aarch64")]
fn main() {
gcc::Build::new()
.flag("-O3")
.flag("-c")
.file("src/runtime/runtime_c_aarch64_sysv.c")
.compile("libruntime_c.a");
......
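For reference, a minimal sketch of a complete build script driving the gcc crate with these flags is shown next; it condenses the two hunks above into one file and omits anything else the real build.rs does, so treat it as an assumption-laden outline rather than the project's actual script.

extern crate gcc;

// -O3 optimises the C runtime shim; -c stops the driver after compilation
// so only the object file is handed to the static archive.
#[cfg(target_arch = "x86_64")]
fn main() {
    gcc::Build::new()
        .flag("-O3")
        .flag("-c")
        .file("src/runtime/runtime_c_x64_sysv.c")
        .compile("libruntime_c.a");
}

#[cfg(target_arch = "aarch64")]
fn main() {
    // same flags, aarch64 variant of the runtime shim
    gcc::Build::new()
        .flag("-O3")
        .flag("-c")
        .file("src/runtime/runtime_c_aarch64_sysv.c")
        .compile("libruntime_c.a");
}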
......@@ -55,5 +55,5 @@ else
git -C ./RPySOM submodule init
git -C ./RPySOM submodule update
fi
pytest test_*.py -v --color=yes 2>&1 | tee $MU_ZEBU/pytest_out.txt
shopt -s extglob
pytest ./test_!(pypy).py -v --color=yes 2>&1 | tee $MU_ZEBU/pytest_out.txt
......@@ -26,4 +26,5 @@ lazy_static = "*"
log = "*"
simple_logger = "*"
rodal = { git = "https://gitlab.anu.edu.au/mu/rodal", version = ">= 0.0.5" }
regex = "*"
#rodal = { path = "../../../rodal_test/rodal", version = ">= 0.0.5" }
......@@ -795,7 +795,7 @@ impl BlockContent {
}
Instruction_::Call { ref resume, .. } |
Instruction_::CCall { ref resume, .. } |
Instruction_::SwapStack { ref resume, .. } |
Instruction_::SwapStackExc { ref resume, .. } |
Instruction_::ExnInstruction { ref resume, .. } => {
let mut live_outs = vec![];
live_outs.append(&mut resume.normal_dest.get_arguments(&ops));
......@@ -1012,6 +1012,23 @@ impl Value {
}
}
pub fn is_const_zero(&self) -> bool {
match self.v {
Value_::Constant(Constant::Int(val)) if val == 0 => true,
Value_::Constant(Constant::Double(val)) if val == 0f64 => true,
Value_::Constant(Constant::Float(val)) if val == 0f32 => true,
Value_::Constant(Constant::IntEx(ref vec)) => {
if vec.iter().all(|x| *x == 0) {
true
} else {
false
}
}
Value_::Constant(Constant::NullRef) => true,
_ => false
}
}
/// disguises a value as another type.
/// This is usually used to treat an integer type as an integer of a different length
/// This method is unsafe
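Since this merge targets the issue82-inf-loop-in-peephole branch, a plausible use of the new Value::is_const_zero helper is a constant-folding check of the following shape. This is purely illustrative: simplify_zero_add is an invented name and is not part of the commit; only Value, MuID, and is_const_zero come from the code above.

// Illustrative only: fold `x + 0` / `0 + x` down to the other operand.
fn simplify_zero_add(lhs: &Value, rhs: &Value) -> Option<MuID> {
    if rhs.is_const_zero() {
        Some(lhs.id()) // x + 0  =>  x
    } else if lhs.is_const_zero() {
        Some(rhs.id()) // 0 + x  =>  x
    } else {
        None
    }
}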
......@@ -1077,7 +1094,7 @@ impl Value {
}
}
const DISPLAY_ID: bool = false;
const DISPLAY_ID: bool = true;
const DISPLAY_TYPE: bool = true;
const PRINT_ABBREVIATE_NAME: bool = true;
......@@ -1469,7 +1486,8 @@ pub fn mangle_name(name: MuName) -> MuName {
"__mu_".to_string() + name.as_str()
}
// WARNING: This only reverses mangle_name above when no warning is issued)
/// demangles a Mu name
// WARNING: This only reverses mangle_name above when no warning is issued)
pub fn demangle_name(mut name: MuName) -> MuName {
let name = if cfg!(target_os = "macos") && name.starts_with("___mu_") {
name.split_off(1)
......@@ -1492,57 +1510,28 @@ pub fn demangle_name(mut name: MuName) -> MuName {
name
}
// TODO: Why the hell isn't this working?
pub fn demangle_text(text: String) -> String {
    let text = text.as_bytes();
    let n = text.len();
    let mut output = String::new();

    // We have a mangled name
    let mut last_i = 0; // The last i value that we dumped to output
    let mut i = 0;
    // TODO: this should work for utf-8 stuff right? (since all mangled names are in ascii)
    while i < n {
        let c = text[i] as char;
        // We're at the beginning of the string
        // wait for a word boundary
        if c.is_alphanumeric() || c == '_' {
            // We just found a mangled name
            if text[i..].starts_with("__mu_".as_bytes()) {
                output += std::str::from_utf8(&text[last_i..i]).unwrap();
                let start = i;
                // Find the end of the name
                while i < n {
                    let c = text[i] as char;
                    if !c.is_alphanumeric() && c != '_' {
                        break; // We found the end!
                    }
                    i += 1;
                }
                output +=
                    demangle_name(String::from_utf8(text[start..i].to_vec()).unwrap()).as_str();
                // Skip to the end of the name
                last_i = i;
                continue;
            } else {
                // Skip to the end of this alphanumeric sequence
                while i < n {
                    let c = text[i] as char;
                    if !c.is_alphanumeric() && c != '_' {
                        break; // We found the end!
                    }
                    i += 1;
                }
            }
            continue;
        }
        // Not the start of mangled name, continue
        i += 1;
    }
    // Return output plus whatever is left of the string
    output + std::str::from_utf8(&text[last_i..n]).unwrap()
}

extern crate regex;

/// identifies mu names and demangles them
pub fn demangle_text(text: String) -> String {
    use self::regex::Regex;

    lazy_static!{
        static ref IDENT_NAME: Regex = if cfg!(target_os = "macos") {
            Regex::new(r"___mu_\w+").unwrap()
        } else {
            Regex::new(r"__mu_\w+").unwrap()
        };
    }

    let mut res = text.clone();
    for cap in IDENT_NAME.captures_iter(&text) {
        let name = cap.get(0).unwrap().as_str().to_string();
        let demangled = demangle_name(name.clone());
        res = res.replacen(&name, &demangled, 1);
    }
    res
}
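A minimal, self-contained sketch of the capture-and-replace idiom the new demangle_text relies on is given below. The pattern and sample text are illustrative, and the prefix-stripping line stands in for demangle_name; only the regex crate calls mirror the diff.

extern crate regex;
use regex::Regex;

fn strip_mu_prefix_demo() {
    // Match every mangled Mu identifier in a blob of assembly-like text
    // and rewrite it in place, one occurrence at a time.
    let ident = Regex::new(r"__mu_\w+").unwrap();
    let text = "bl __mu_fib64 // call into Mu".to_string();
    let mut res = text.clone();
    for cap in ident.captures_iter(&text) {
        let name = cap.get(0).unwrap().as_str().to_string();
        let demangled = name["__mu_".len()..].to_string(); // stand-in for demangle_name
        res = res.replacen(&name, &demangled, 1);
    }
    assert_eq!(res, "bl fib64 // call into Mu");
}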
......
......@@ -33,6 +33,7 @@
#[macro_use]
extern crate rodal;
extern crate log;
extern crate simple_logger;
#[macro_use]
extern crate lazy_static;
......
......@@ -1037,52 +1037,6 @@ impl ASMCodeGen {
self.cur_mut().code.push(ASMInst::symbolic(code));
}
fn add_asm_call(
&mut self,
code: String,
potentially_excepting: Option<MuName>,
arguments: Vec<P<Value>>,
target: Option<(MuID, ASMLocation)>
) {
// a call instruction will use all the argument registers
// do not need
let mut uses: LinkedHashMap<MuID, Vec<ASMLocation>> = LinkedHashMap::new();
if target.is_some() {
let (id, loc) = target.unwrap();
uses.insert(id, vec![loc]);
}
for arg in arguments {
uses.insert(arg.id(), vec![]);
}
let mut defines: LinkedHashMap<MuID, Vec<ASMLocation>> = LinkedHashMap::new();
for reg in CALLER_SAVED_GPRS.iter() {
if !defines.contains_key(&reg.id()) {
defines.insert(reg.id(), vec![]);
}
}
for reg in CALLER_SAVED_FPRS.iter() {
if !defines.contains_key(&reg.id()) {
defines.insert(reg.id(), vec![]);
}
}
self.add_asm_inst_internal(
code,
defines,
uses,
false,
{
if potentially_excepting.is_some() {
ASMBranchTarget::PotentiallyExcepting(potentially_excepting.unwrap())
} else {
ASMBranchTarget::None
}
},
None
)
}
fn add_asm_inst(
&mut self,
code: String,
......@@ -1147,6 +1101,7 @@ impl ASMCodeGen {
trace!("asm: {}", demangle_text(code.clone()));
trace!(" defines: {:?}", defines);
trace!(" uses: {:?}", uses);
trace!(" target: {:?}", target);
let mc = self.cur_mut();
// put the instruction
......@@ -2162,6 +2117,57 @@ impl ASMCodeGen {
self.add_asm_inst(asm, ignore_zero_register(id1, vec![loc1]), uses, false)
}
fn internal_call(
&mut self,
callsite: Option<String>,
code: String,
pe: Option<MuName>,
args: Vec<P<Value>>,
ret: Vec<P<Value>>,
target: Option<(MuID, ASMLocation)>,
may_return: bool
) -> Option<ValueLocation> {
let mut uses: LinkedHashMap<MuID, Vec<ASMLocation>> = LinkedHashMap::new();
if target.is_some() {
let (id, loc) = target.unwrap();
uses.insert(id, vec![loc]);
}
for arg in args {
uses.insert(arg.id(), vec![]);
}
let mut defines: LinkedHashMap<MuID, Vec<ASMLocation>> = LinkedHashMap::new();
for ret in ret.iter() {
defines.insert(ret.id(), vec![]);
}
self.add_asm_inst_internal(
code,
defines,
uses,
false,
{
if pe.is_some() {
ASMBranchTarget::PotentiallyExcepting(pe.unwrap())
} else if may_return {
ASMBranchTarget::None
} else {
ASMBranchTarget::Return
}
},
None
);
if callsite.is_some() {
let callsite_symbol = mangle_name(callsite.as_ref().unwrap().clone());
self.add_asm_symbolic(directive_globl(callsite_symbol.clone()));
self.add_asm_symbolic(format!("{}:", callsite_symbol.clone()));
Some(ValueLocation::Relocatable(RegGroup::GPR, callsite.unwrap()))
} else {
None
}
}
fn emit_ldr_spill(&mut self, dest: Reg, src: Mem) {
self.internal_load("LDR", dest, src, false, true, false);
}
......@@ -2395,12 +2401,13 @@ impl CodeGenerator for ASMCodeGen {
fn emit_bl(
&mut self,
callsite: String,
callsite: Option<String>,
func: MuName,
pe: Option<MuName>,
args: Vec<P<Value>>,
ret: Vec<P<Value>>,
is_native: bool
) -> ValueLocation {
) -> Option<ValueLocation> {
if is_native {
trace_emit!("\tBL /*C*/ {}({:?})", func, args);
} else {
......@@ -2413,34 +2420,27 @@ impl CodeGenerator for ASMCodeGen {
mangle_name(func)
};
let mut ret = ret;
ret.push(LR.clone());
let asm = format!("BL {}", func);
self.add_asm_call(asm, pe, args, None);
let callsite_symbol = mangle_name(callsite.clone());
self.add_asm_symbolic(directive_globl(callsite_symbol.clone()));
self.add_asm_symbolic(format!("{}:", callsite_symbol.clone()));
ValueLocation::Relocatable(RegGroup::GPR, callsite)
self.internal_call(callsite, asm, pe, args, ret, None, true)
}
fn emit_blr(
&mut self,
callsite: String,
callsite: Option<String>,
func: Reg,
pe: Option<MuName>,
args: Vec<P<Value>>
) -> ValueLocation {
args: Vec<P<Value>>,
ret: Vec<P<Value>>
) -> Option<ValueLocation> {
trace_emit!("\tBLR {}({:?})", func, args);
let mut ret = ret;
ret.push(LR.clone());
let (reg1, id1, loc1) = self.prepare_reg(func, 3 + 1);
let asm = format!("BLR {}", reg1);
self.add_asm_call(asm, pe, args, Some((id1, loc1)));
let callsite_symbol = mangle_name(callsite.clone());
self.add_asm_symbolic(directive_globl(callsite_symbol.clone()));
self.add_asm_symbolic(format!("{}:", callsite_symbol.clone()));
ValueLocation::Relocatable(RegGroup::GPR, callsite)
self.internal_call(callsite, asm, pe, args, ret, Some((id1, loc1)), true)
}
......@@ -2458,23 +2458,33 @@ impl CodeGenerator for ASMCodeGen {
None
);
}
fn emit_b_func(&mut self, func_name: MuName, args: Vec<P<Value>>) {
    trace_emit!("\tB {}({:?})", func_name, args);

    let asm = format!("/*TAILCALL*/ B {}", mangle_name(func_name.clone()));

    let mut uses: LinkedHashMap<MuID, Vec<ASMLocation>> = LinkedHashMap::new();
    for arg in args {
        uses.insert(arg.id(), vec![]);
    }

    self.add_asm_inst_internal(
        asm,
        linked_hashmap!{},
        uses,
        false,
        ASMBranchTarget::Return,
        None
    );
}

fn emit_b_call(
    &mut self,
    callsite: Option<String>,
    func: MuName,
    pe: Option<MuName>,
    args: Vec<P<Value>>,
    ret: Vec<P<Value>>,
    is_native: bool,
    may_return: bool
) -> Option<ValueLocation> {
    if is_native {
        trace_emit!("\tB /*C*/ {}({:?})", func, args);
    } else {
        trace_emit!("\tB {}({:?})", func, args);
    }

    let func = if is_native {
        "/*C*/".to_string() + func.as_str()
    } else {
        mangle_name(func)
    };

    let asm = format!("/*CALL*/ B {}", func);
    self.internal_call(callsite, asm, pe, args, ret, None, may_return)
}
fn emit_b_cond(&mut self, cond: &str, dest_name: MuName) {
trace_emit!("\tB.{} {}", cond, dest_name);
......@@ -2503,35 +2513,23 @@ impl CodeGenerator for ASMCodeGen {
None
);
}
fn emit_br_func(&mut self, func_address: Reg, args: Vec<P<Value>>) {
    trace_emit!("\tBR {}({:?})", func_address, args);

    let (reg1, id1, loc1) = self.prepare_reg(func_address, 2 + 1);
    let asm = format!("/*TAILCALL*/ BR {}", reg1);

    let mut added_id1 = false;
    let mut uses: LinkedHashMap<MuID, Vec<ASMLocation>> = LinkedHashMap::new();
    for arg in args {
        if arg.id() == id1 {
            uses.insert(arg.id(), vec![loc1.clone()]);
            added_id1 = true;
        } else {
            uses.insert(arg.id(), vec![]);
        }
    }
    if !added_id1 {
        uses.insert(id1, vec![loc1]);
    }

    self.add_asm_inst_internal(
        asm,
        linked_hashmap!{},
        uses,
        false,
        ASMBranchTarget::Return,
        None
    );
}

fn emit_br_call(
    &mut self,
    callsite: Option<String>,
    func: Reg,
    pe: Option<MuName>,
    args: Vec<P<Value>>,
    ret: Vec<P<Value>>,
    may_return: bool
) -> Option<ValueLocation> {
    trace_emit!("\tBR {}({:?})", func, args);

    let (reg1, id1, loc1) = self.prepare_reg(func, 3 + 1);
    let asm = format!("/*CALL*/ BR {}", reg1);
    self.internal_call(callsite, asm, pe, args, ret, Some((id1, loc1)), may_return)
}
fn emit_cbnz(&mut self, src: Reg, dest_name: MuName) {
self.internal_branch_op("CBNZ", src, dest_name);
}
......
......@@ -120,26 +120,45 @@ pub trait CodeGenerator {
// Calls
fn emit_bl(
&mut self,
callsite: String,
callsite: Option<String>,
func: MuName,
pe: Option<MuName>,
args: Vec<P<Value>>,
ret: Vec<P<Value>>,
is_native: bool
) -> ValueLocation;
) -> Option<ValueLocation>;
fn emit_blr(
&mut self,
callsite: String,
callsite: Option<String>,
func: Reg,
pe: Option<MuName>,
args: Vec<P<Value>>
) -> ValueLocation;
args: Vec<P<Value>>,
ret: Vec<P<Value>>
) -> Option<ValueLocation>;
// Branches
fn emit_b(&mut self, dest_name: MuName);
fn emit_b_func(&mut self, func: MuName, args: Vec<P<Value>>); // For tail calls
fn emit_b_cond(&mut self, cond: &str, dest_name: MuName);
fn emit_br(&mut self, dest_address: Reg);
fn emit_br_func(&mut self, func_address: Reg, args: Vec<P<Value>>); // For tail calls
fn emit_b_call(
&mut self,
callsite: Option<String>,
func: MuName,
pe: Option<MuName>,
args: Vec<P<Value>>,
ret: Vec<P<Value>>,
is_native: bool,
may_return: bool
) -> Option<ValueLocation>;
fn emit_br_call(
&mut self,
callsite: Option<String>,
func: Reg,
pe: Option<MuName>,
args: Vec<P<Value>>,
ret: Vec<P<Value>>,
may_return: bool
) -> Option<ValueLocation>;
fn emit_ret(&mut self, src: Reg);
fn emit_cbnz(&mut self, src: Reg, dest_name: MuName);
......
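To make the new trait contract concrete, a hypothetical call site is sketched below: passing may_return = false reproduces the old tail-call behaviour (the branch is recorded as a Return target in internal_call above), while a Some(..) callsite plus a pe destination would mark a potentially-excepting call. The variable names here are invented; only the signature comes from the trait.

// Hypothetical caller (names invented): emit a Mu-level tail call through
// the new emit_b_call, asking for no callsite label and no exception edge.
let _loc = codegen.emit_b_call(
    None,               // callsite: no label needed for a tail call
    func_name.clone(),  // MuName of the callee (will be mangled)
    None,               // pe: no potentially-excepting destination
    arg_regs.clone(),   // registers the branch uses
    vec![],             // a tail call defines no return registers here
    false,              // is_native: Mu function, not a C symbol
    false               // may_return: false, so treated as a Return branch
);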
......@@ -1199,7 +1199,8 @@ impl ASMCodeGen {
&mut self,
code: String,
potentially_excepting: Option<MuName>,
arguments: Vec<P<Value>>,
use_vec: Vec<P<Value>>,
def_vec: Vec<P<Value>>,
target: Option<(MuID, ASMLocation)>
) {
let mut uses: LinkedHashMap<MuID, Vec<ASMLocation>> = LinkedHashMap::new();
......@@ -1207,20 +1208,13 @@ impl ASMCodeGen {
let (id, loc) = target.unwrap();
uses.insert(id, vec![loc]);
}
for arg in arguments {
uses.insert(arg.id(), vec![]);
for u in use_vec {
uses.insert(u.id(), vec![]);
}
let mut defines: LinkedHashMap<MuID, Vec<ASMLocation>> = LinkedHashMap::new();
for reg in x86_64::CALLER_SAVED_GPRS.iter() {
if !defines.contains_key(&reg.id()) {
defines.insert(reg.id(), vec![]);
}
}
for reg in x86_64::CALLER_SAVED_FPRS.iter() {
if !defines.contains_key(&reg.id()) {
defines.insert(reg.id(), vec![]);
}
for d in def_vec {
defines.insert(d.id(), vec![]);
}
self.add_asm_inst_internal(
......@@ -3291,18 +3285,15 @@ impl CodeGenerator for ASMCodeGen {
callsite: String,
func: MuName,
pe: Option<MuName>,
args: Vec<P<Value>>,
uses: Vec<P<Value>>,
defs: Vec<P<Value>>,
is_native: bool
) -> ValueLocation {
if is_native {
trace!("emit: call /*C*/ {}({:?})", func, args);
} else {
trace!("emit: call {}({:?})", func, args);
}
let func = if is_native {
trace!("emit: call /*C*/ {}({:?})", func, uses);
"/*C*/".to_string() + symbol(func).as_str()
} else {
trace!("emit: call {}({:?})", func, uses);
symbol(mangle_name(func))
};
......@@ -3312,7 +3303,7 @@ impl CodeGenerator for ASMCodeGen {
format!("call {}@PLT", func)
};
self.add_asm_call(asm, pe, args, None);
self.add_asm_call(asm, pe, uses, defs, None);
self.add_asm_global_label(symbol(mangle_name(callsite.clone())));
ValueLocation::Relocatable(RegGroup::GPR, callsite)
......@@ -3323,14 +3314,15 @@ impl CodeGenerator for ASMCodeGen {
callsite: String,
func: &P<Value>,
pe: Option<MuName>,
args: Vec<P<Value>>
uses: Vec<P<Value>>,
defs: Vec<P<Value>>
) -> ValueLocation {
trace!("emit: call {}", func);
let (reg, id, loc) = self.prepare_reg(func, 6);
let asm = format!("call *{}", reg);
// the call uses the register
self.add_asm_call(asm, pe, args, Some((id, loc)));
self.add_asm_call(asm, pe, uses, defs, Some((id, loc)));
self.add_asm_global_label(symbol(mangle_name(callsite.clone())));
ValueLocation::Relocatable(RegGroup::GPR, callsite)
......@@ -3342,12 +3334,61 @@ impl CodeGenerator for ASMCodeGen {
callsite: String,
func: &P<Value>,
pe: Option<MuName>,
args: Vec<P<Value>>
uses: Vec<P<Value>>,
defs: Vec<P<Value>>
) -> ValueLocation {
trace!("emit: call {}", func);
unimplemented!()
}
fn emit_call_jmp(
&mut self,
callsite: String,
func: MuName,
pe: Option<MuName>,
uses: Vec<P<Value>>,
defs: Vec<P<Value>>,
is_native: bool
) -> ValueLocation {
let func = if is_native {
trace!("emit: call/jmp /*C*/ {}({:?})", func, uses);
"/*C*/".to_string() + symbol(func).as_str()
} else {
trace!("emit: call/jmp {}({:?})", func, uses);
symbol(mangle_name(func))
};
let asm = if cfg!(target_os = "macos") {
format!("/*CALL*/ jmp {}", func)
} else {
format!("/*CALL*/ jmp {}@PLT", func)
};
self.add_asm_call(asm, pe, uses, defs, None);
self.add_asm_global_label(symbol(mangle_name(callsite.clone())));
ValueLocation::Relocatable(RegGroup::GPR, callsite)
}
fn emit_call_jmp_indirect(
&mut self,
callsite: String,
func: &P<Value>,
pe: Option<MuName>,
uses: Vec<P<Value>>,
defs: Vec<P<Value>>
) -> ValueLocation {
trace!("emit: call/jmp {}", func);
let (reg, id, loc) = self.prepare_reg(func, 6);
let asm = format!("/*CALL*/ jmp *{}", reg);
// the call uses the register
self.add_asm_call(asm, pe, uses, defs, Some((id, loc)));
self.add_asm_global_label(symbol(mangle_name(callsite.clone())));
ValueLocation::Relocatable(RegGroup::GPR, callsite)
}
fn emit_ret(&mut self) {
trace!("emit: ret");
......@@ -3706,8 +3747,6 @@ pub fn emit_code(fv: &mut MuFunctionVersion, vm: &VM) {
// constants in text section
file.write("\t.text\n".as_bytes()).unwrap();
// alignment for constant are 16 bytes
write_const_align(