Commit 30226d91 authored by qinsoon

Merge remote-tracking branch 'origin/develop' into trace-scheduling

parents 0b2bb763 5a939029
@@ -307,6 +307,8 @@ impl MuFunctionVersion {
ret
}
// TODO: It may be more efficient to compute this when the instructions
// are added to the function version and store the result in a field
pub fn has_throw(&self) -> bool {
let f_content = self.content.as_ref().unwrap();
@@ -332,6 +334,32 @@ impl MuFunctionVersion {
false
}
pub fn has_tailcall(&self) -> bool {
let f_content = self.content.as_ref().unwrap();
for (_, block) in f_content.blocks.iter() {
let block_content = block.content.as_ref().unwrap();
for inst in block_content.body.iter() {
match inst.v {
TreeNode_::Instruction(ref inst) => {
match inst.v {
Instruction_::TailCall(_) => {return true;}
_ => {
// do nothing
}
}
},
_ => {
unreachable!()
}
}
}
}
false
}
}
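As an aside (not part of this commit): the nested match in has_tailcall can be flattened with iterator combinators. A minimal equivalent sketch, assuming the same AST types:

pub fn has_tailcall(&self) -> bool {
    let f_content = self.content.as_ref().unwrap();
    f_content.blocks.iter().any(|(_, block)| {
        block.content.as_ref().unwrap().body.iter().any(|node| {
            match node.v {
                TreeNode_::Instruction(ref inst) => match inst.v {
                    Instruction_::TailCall(_) => true,
                    _ => false // any other instruction is not a tail call
                },
                _ => unreachable!() // block bodies only contain instructions
            }
        })
    })
}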
/// FunctionContent contains all blocks (which include all instructions) for the function
......
@@ -13,6 +13,7 @@
// limitations under the License.
use ast::ir::*;
use ast::ptr::P;
use runtime::ValueLocation;
use compiler::machine_code::MachineCode;
@@ -47,10 +48,6 @@ pub trait CodeGenerator {
// emit code to adjust frame
fn emit_frame_grow(&mut self); // Emits a SUB
// Used to pass a string that the assembler will interpret as an immediate argument
// (This is necessary to support the use of ELF relocations like ':tprel_hi12:foo')
fn emit_add_str(&mut self, dest: Reg, src1: Reg, src2: &str);
// stack manipulation
fn emit_push_pair(&mut self, src1: Reg, src2: Reg, stack: Reg); // Emits a STP
fn emit_pop_pair(&mut self, dest1: Reg, dest2: Reg, stack: Reg); // Emits a LDP
@@ -59,6 +56,8 @@ pub trait CodeGenerator {
fn emit_ldr_callee_saved(&mut self, dest: Reg, src: Mem);
fn emit_str_callee_saved(&mut self, dest: Mem, src: Reg);
//==================================================================================================
/* Below are all ARMv8-A AArch64 instruction mnemonics (with all operand modes) except:
PRFM, PRFUM, CRC32*
All advanced SIMD instructions (except MOVI)
@@ -102,15 +101,15 @@ pub trait CodeGenerator {
fn emit_stnp(&mut self, dest: Mem, src1: Reg/*GPR or FPR*/, src2: Reg/*GPR or FPR*/); // [base, #simm7]
// Calls
fn emit_bl(&mut self, callsite: String, func: MuName, pe: Option<MuName>, is_native: bool) -> ValueLocation;
fn emit_blr(&mut self, callsite: String, func: Reg, pe: Option<MuName>) -> ValueLocation;
fn emit_bl(&mut self, callsite: String, func: MuName, pe: Option<MuName>, args: Vec<P<Value>>, is_native: bool) -> ValueLocation;
fn emit_blr(&mut self, callsite: String, func: Reg, pe: Option<MuName>, args: Vec<P<Value>>) -> ValueLocation;
// Branches
fn emit_b(&mut self, dest_name: MuName);
fn emit_b_func(&mut self, func: MuName);
fn emit_b_func(&mut self, func: MuName, args: Vec<P<Value>>); // For tail calls
fn emit_b_cond(&mut self, cond: &str, dest_name: MuName);
fn emit_br(&mut self, dest_address: Reg);
fn emit_br_func(&mut self, func_address: Reg);
fn emit_br_func(&mut self, func_address: Reg, args: Vec<P<Value>>); // For tail calls
fn emit_ret(&mut self, src: Reg);
fn emit_cbnz(&mut self, src: Reg, dest_name: MuName);
@@ -123,8 +122,8 @@ pub trait CodeGenerator {
fn emit_mrs(&mut self, dest: Reg, src: &str);
// Address calculation
fn emit_adr(&mut self, dest: Reg, src: Reg);
fn emit_adrp(&mut self, dest: Reg, src: Reg);
fn emit_adr(&mut self, dest: Reg, src: Mem);
fn emit_adrp(&mut self, dest: Reg, src: Mem);
// Unary ops
fn emit_mov(&mut self, dest: Reg/*GPR or SP or ZR*/, src: Reg/*GPR or SP or ZR*/); // The SP and ZR cannot both be used
......
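For context on the emit_adr/emit_adrp signature change above (standard AArch64 semantics, not specific to this backend): ADR materialises a PC-relative address within ±1 MiB, while ADRP produces the base of the 4 KiB page containing the target and is normally paired with an ADD of the low 12 bits:

// adrp x0, sym            ; x0 = base of the 4 KiB page containing sym
// add  x0, x0, :lo12:sym  ; x0 = full address of sym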
@@ -67,6 +67,7 @@ pub struct InstructionSelection {
current_callsites: LinkedList<(MuName, MuID, usize)>,
// key: block id, val: block location
current_exn_blocks: HashMap<MuID, MuName>,
current_stack_arg_size: usize,
current_xr_value: Option<P<Value>>, // A temporary that holds the saved XR value (if needed)
current_constants: HashMap<MuID, P<Value>>,
current_constants_locs: HashMap<MuID, P<Value>>
@@ -94,6 +95,7 @@ impl <'a> InstructionSelection {
current_func_start: None,
current_callsites: LinkedList::new(),
current_exn_blocks: HashMap::new(),
current_stack_arg_size: 0,
current_xr_value: None,
current_constants: HashMap::new(),
current_constants_locs: HashMap::new()
@@ -118,7 +120,7 @@ impl <'a> InstructionSelection {
// TODO: Optimise if cond is a flag from a binary operation?
Instruction_::Branch2 { cond, ref true_dest, ref false_dest, .. } => {
trace!("instsel on BRANCH2");
let (fallthrough_dest, branch_dest, branch_if_true) = (false_dest, true_dest);
let (fallthrough_dest, branch_dest) = (false_dest, true_dest);
let ref ops = inst.ops;
@@ -334,6 +336,7 @@ impl <'a> InstructionSelection {
}
self.emit_mu_call(
false, // is tail
inst, // inst: &Instruction,
data, // calldata: &CallData,
None, // resumption: Option<&ResumptionData>,
@@ -345,6 +348,7 @@ impl <'a> InstructionSelection {
trace!("instsel on CALL");
self.emit_mu_call(
false, // is tail
inst,
data,
Some(resume),
@@ -352,6 +356,18 @@ impl <'a> InstructionSelection {
f_content, f_context, vm);
},
Instruction_::TailCall(ref data) => {
trace!("instsel on TAILCALL");
self.emit_mu_call(
true, // is tail
inst,
data,
None,
node,
f_content, f_context, vm);
},
Instruction_::ExprCCall { ref data, is_abort } => {
trace!("instsel on EXPRCCALL");
@@ -376,7 +392,7 @@ impl <'a> InstructionSelection {
// TODO: Are vals in the same order as the return types in the function's signature?
let ret_tys = vals.iter().map(|i| node_type(&ops[*i])).collect();
let ret_type = self.combine_return_types(&ret_tys);
let ret_type = self.combine_return_types(&ret_tys, vm);
let n = ret_tys.len(); // number of return values
let xr_value = self.current_xr_value.as_ref().unwrap().clone();
@@ -2887,14 +2903,18 @@ impl <'a> InstructionSelection {
// Note: if tys has more than 1 element, then this will return a new struct type
// , but each call will generate a different name for this struct type (but the layout will be identical)
fn combine_return_types(&self, tys: &Vec<P<MuType>>) -> P<MuType>{
fn combine_return_types(&self, tys: &Vec<P<MuType>>, vm: &VM) -> P<MuType>{
let n = tys.len();
if n == 0 {
VOID_TYPE.clone()
} else if n == 1 {
tys[0].clone()
} else {
P(MuType::new(new_internal_id(), MuType_::mustruct(format!("#{}", new_internal_id()), tys.to_vec())))
//declare_type(&self, entity: MuEntityHeader, ty: MuType_)
let id = new_internal_id();
let name = format!("return_type:#{}", id);
let header = MuEntityHeader::named(new_internal_id(), name.clone());
vm.declare_type(header, MuType_::mustruct(name, tys.to_vec()))
}
}
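Concretely, the intended behaviour of the new version (the types shown are illustrative):

// combine_return_types(&vec![], vm)         => VOID_TYPE
// combine_return_types(&vec![i64], vm)      => i64, unchanged
// combine_return_types(&vec![i64, f64], vm) => a fresh struct type named "return_type:#<id>"
//                                              with fields (i64, f64), now registered with the
//                                              VM via declare_type rather than constructed ad hoc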
@@ -3074,21 +3094,38 @@ impl <'a> InstructionSelection {
// returns the stack arg offset - we will need this to collapse stack after the call
fn emit_precall_convention(&mut self, args: &Vec<P<Value>>, arg_tys: &Vec<P<MuType>>, return_size: usize, f_context: &mut FunctionContext, vm: &VM) -> usize
// as well as a list of argument registers
fn emit_precall_convention(&mut self, is_tail: bool, args: &Vec<P<Value>>, arg_tys: &Vec<P<MuType>>, return_size: usize, f_context: &mut FunctionContext, vm: &VM)
-> (usize, Vec<P<Value>>)
{
//sig.ret_tys
let (_, locations, stack_size) = self.compute_argument_locations(&arg_tys, &SP, 0, &vm);
// If we're tail calling, use the current frame's argument location instead
let mut reg_args = Vec::<P<Value>>::new();
let (arg_base, arg_offset) = if is_tail { (&*FP, 16) } else { (&*SP, 0) };
let (_, locations, stack_size) = self.compute_argument_locations(&arg_tys, arg_base, arg_offset, &vm);
if is_tail {
if stack_size > self.current_stack_arg_size {
unimplemented!();
} else {
if return_size > 0 {
// Restore XR
let xr_value = self.current_xr_value.as_ref().unwrap().clone();
self.backend.emit_mov(&XR, &xr_value);
}
}
if return_size > 0 {
// Reserve space on the stack for the return value
emit_sub_u64(self.backend.as_mut(), &SP, &SP, return_size as u64);
} else {
if return_size > 0 {
// Reserve space on the stack for the return value
emit_sub_u64(self.backend.as_mut(), &SP, &SP, return_size as u64);
// XR needs to point to where the callee should return arguments
self.backend.emit_mov(&XR, &SP);
// XR needs to point to where the callee should return arguments
self.backend.emit_mov(&XR, &SP);
}
// Reserve space on the stack for all stack arguments
emit_sub_u64(self.backend.as_mut(), &SP, &SP, stack_size as u64);
}
// Reserve space on the stack for all stack arguments
emit_sub_u64(self.backend.as_mut(), &SP, &SP, stack_size as u64);
// Write the arguments to where they belong on the stack
for i in 0..args.len() {
let i = i as usize;
let ref arg_val = args[i];
@@ -3112,17 +3149,22 @@ impl <'a> InstructionSelection {
let arg_val = emit_reg_value(self.backend.as_mut(), &arg_val, f_context, vm);
let (val_l, val_h) = split_int128(&arg_val, f_context, vm);
let arg_loc_h = get_register_from_id(arg_loc.id() + 2);
reg_args.push(arg_loc.clone());
reg_args.push(arg_loc_h.clone());
emit_move_value_to_value(self.backend.as_mut(), &arg_loc, &val_l, f_context, vm);
emit_move_value_to_value(self.backend.as_mut(), &arg_loc_h, &val_h, f_context, vm);
} else {
if arg_loc.is_reg() {
reg_args.push(arg_loc.clone());
}
emit_move_value_to_value(self.backend.as_mut(), &arg_loc, &arg_val, f_context, vm)
}
}
}
}
stack_size
(stack_size, reg_args)
}
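The tail-call branch above writes outgoing arguments over the caller's own incoming argument area instead of reserving fresh space below SP. Under the frame layout this backend assumes (incoming stack arguments are read from FP+16 elsewhere in this file, with FP pointing at the saved FP/LR pair), the two cases look roughly like this:

// higher addresses
//   [FP + 16 ...]  incoming stack arguments   <- tail calls write outgoing args here (FP, offset 16)
//   [FP + 8]       saved LR
//   [FP + 0]       saved FP (frame record)
//   ...            callee-saved registers, locals
//   [SP ...]       outgoing stack arguments   <- normal calls write them here (SP, offset 0)
// lower addresses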
fn emit_postcall_convention(&mut self, ret_tys: &Vec<P<MuType>>, rets: &Option<Vec<P<Value>>>, ret_type: &P<MuType>, arg_size: usize, ret_size: usize, f_context: &mut FunctionContext, vm: &VM) -> Vec<P<Value>> {
@@ -3357,9 +3399,9 @@ impl <'a> InstructionSelection {
f_context: &mut FunctionContext,
vm: &VM) -> Vec<P<Value>>
{
let return_type = self.combine_return_types(&sig.ret_tys);
let return_type = self.combine_return_types(&sig.ret_tys, vm);
let return_size = self.compute_return_allocation(&return_type, &vm);
let stack_arg_size = self.emit_precall_convention(&args, &sig.arg_tys, return_size, f_context, vm);
let (stack_arg_size, arg_regs) = self.emit_precall_convention(false, &args, &sig.arg_tys, return_size, f_context, vm);
// make call
if vm.is_doing_jit() {
@@ -3367,7 +3409,7 @@ impl <'a> InstructionSelection {
} else {
let callsite = self.new_callsite_label(cur_node);
self.backend.emit_bl(callsite.clone(), func_name, None, true); // assume ccall won't throw exception
self.backend.emit_bl(callsite.clone(), func_name, None, arg_regs, true); // assume ccall won't throw exception
// TODO: What if there's an exception block?
self.current_callsites.push_back((callsite, 0, stack_arg_size));
@@ -3454,6 +3496,7 @@ impl <'a> InstructionSelection {
fn emit_mu_call(
&mut self,
is_tail: bool, // For tail calls
inst: &Instruction,
calldata: &CallData,
resumption: Option<&ResumptionData>,
@@ -3480,9 +3523,9 @@ impl <'a> InstructionSelection {
debug_assert!(func_sig.arg_tys.len() == calldata.args.len());
if cfg!(debug_assertions) {
if inst.value.is_some() {
assert!(func_sig.ret_tys.len() == inst.value.as_ref().unwrap().len());
assert!((!is_tail) && (func_sig.ret_tys.len() == inst.value.as_ref().unwrap().len()));
} else {
assert!(func_sig.ret_tys.len() == 0, "expect call inst's value doesn't match reg args. value: {:?}, ret args: {:?}", inst.value, func_sig.ret_tys);
assert!(is_tail || (func_sig.ret_tys.len() == 0), "expect call inst's value doesn't match reg args. value: {:?}, ret args: {:?}", inst.value, func_sig.ret_tys);
}
}
@@ -3501,9 +3544,9 @@ impl <'a> InstructionSelection {
unimplemented!();
}
}
let return_type = self.combine_return_types(&func_sig.ret_tys);
let return_type = self.combine_return_types(&func_sig.ret_tys, vm);
let return_size = self.compute_return_allocation(&return_type, &vm);
let stack_arg_size = self.emit_precall_convention(&arg_values, &func_sig.arg_tys, return_size, f_context, vm);
let (stack_arg_size, arg_regs) = self.emit_precall_convention(is_tail, &arg_values, &func_sig.arg_tys, return_size, f_context, vm);
// check if this call has exception clause - need to tell backend about this
let potentially_excepting = {
@@ -3515,40 +3558,58 @@ impl <'a> InstructionSelection {
}
};
if is_tail {
// Restore callee saved registers and pop the frame
self.emit_epilogue(f_context, vm);
}
trace!("generating call inst");
// check direct call or indirect
let callsite = {
if is_tail {
// Emit a branch
if self.match_funcref_const(func) {
let target_id = self.node_funcref_const_to_id(func);
let funcs = vm.funcs().read().unwrap();
let target = funcs.get(&target_id).unwrap().read().unwrap();
if vm.is_doing_jit() {
unimplemented!()
self.backend.emit_b_func(target.name(), arg_regs);
} else {
let target = self.emit_ireg(func, f_content, f_context, vm);
self.backend.emit_br_func(&target, arg_regs);
}
} else {
// Emit a branch with link (i.e. a call)
let callsite = {
if self.match_funcref_const(func) {
let target_id = self.node_funcref_const_to_id(func);
let funcs = vm.funcs().read().unwrap();
let target = funcs.get(&target_id).unwrap().read().unwrap();
if vm.is_doing_jit() {
unimplemented!()
} else {
let callsite = self.new_callsite_label(Some(cur_node));
self.backend.emit_bl(callsite, target.name(), potentially_excepting, arg_regs, false)
}
} else {
let target = self.emit_ireg(func, f_content, f_context, vm);
let callsite = self.new_callsite_label(Some(cur_node));
self.backend.emit_bl(callsite, target.name(), potentially_excepting, false)
self.backend.emit_blr(callsite, &target, potentially_excepting, arg_regs)
}
} else {
let target = self.emit_ireg(func, f_content, f_context, vm);
};
let callsite = self.new_callsite_label(Some(cur_node));
self.backend.emit_blr(callsite, &target, potentially_excepting)
}
};
// record exception branch
if resumption.is_some() {
let ref exn_dest = resumption.as_ref().unwrap().exn_dest;
let target_block = exn_dest.target;
// record exception branch
if resumption.is_some() {
let ref exn_dest = resumption.as_ref().unwrap().exn_dest;
let target_block = exn_dest.target;
self.current_callsites.push_back((callsite.to_relocatable(), target_block, stack_arg_size));
} else {
self.current_callsites.push_back((callsite.to_relocatable(), 0, stack_arg_size));
}
self.current_callsites.push_back((callsite.to_relocatable(), target_block, stack_arg_size));
} else {
self.current_callsites.push_back((callsite.to_relocatable(), 0, stack_arg_size));
// deal with ret vals
self.emit_postcall_convention(&func_sig.ret_tys, &inst.value, &return_type, stack_arg_size, return_size, f_context, vm);
}
// deal with ret vals
self.emit_postcall_convention(&func_sig.ret_tys, &inst.value, &return_type, stack_arg_size, return_size, f_context, vm);
}
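Taken together, emit_mu_call now lowers the four call shapes as follows (a sketch, operands elided; passing arg_regs to the emitters presumably lets later passes see which argument registers are live across the branch):

// tail call, direct:      <emit_epilogue>; B  <func>   (no callsite recorded, no postcall convention)
// tail call, indirect:    <emit_epilogue>; BR Xn
// normal call, direct:    BL  <func>                   (callsite recorded, postcall convention emitted)
// normal call, indirect:  BLR Xn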
fn process_dest(&mut self, ops: &Vec<P<TreeNode>>, dest: &Destination, f_content: &FunctionContent, f_context: &mut FunctionContext, vm: &VM) {
@@ -3602,7 +3663,7 @@ impl <'a> InstructionSelection {
self.backend.emit_frame_grow(); // will include space for callee saved registers
// We need to return arguments in the memory area pointed to by XR, so we need to save it
let ret_ty = self.combine_return_types(&sig.ret_tys);
let ret_ty = self.combine_return_types(&sig.ret_tys, vm);
// This should impose no overhead if it's never used
self.current_xr_value = Some(make_temporary(f_context, ADDRESS_TYPE.clone(), vm));
@@ -3631,8 +3692,8 @@ impl <'a> InstructionSelection {
// unload arguments
// Read arguments starting from FP+16 (FP points to the frame record: the previous FP and LR)
let (_, locations, _) = self.compute_argument_locations(&sig.arg_tys, &FP, 16, &vm);
let (_, locations, stack_arg_size) = self.compute_argument_locations(&sig.arg_tys, &FP, 16, &vm);
self.current_stack_arg_size = stack_arg_size;
for i in 0..args.len() {
let i = i as usize;
let ref arg_val = args[i];
@@ -3660,7 +3721,7 @@ impl <'a> InstructionSelection {
// Integral or pointer type
_ => {
if is_int_ex_reg(&arg_val) {
if is_int_ex_reg(&arg_val) && is_int_reg(&arg_loc) {
let (val_l, val_h) = split_int128(arg_val, f_context, vm);
let arg_loc_h = get_register_from_id(arg_loc.id() + 2);
@@ -3687,8 +3748,7 @@ impl <'a> InstructionSelection {
self.finish_block();
}
// Todo: Don't emit this if the function never returns
fn emit_common_epilogue(&mut self, f_context: &mut FunctionContext, vm: &VM) {
fn emit_epilogue(&mut self, f_context: &mut FunctionContext, vm: &VM) {
// pop all callee-saved registers
for i in (0..CALLEE_SAVED_FPRS.len()).rev() {
let ref reg = CALLEE_SAVED_FPRS[i];
@@ -3711,7 +3771,6 @@ impl <'a> InstructionSelection {
self.backend.emit_pop_pair(&FP, &LR, &SP);
// Note: the stack pointer should now be what it was when the function was called
self.backend.emit_ret(&LR); // return to the Link Register
}
fn match_cmp_res(&mut self, op: &TreeNode) -> bool {
@@ -4479,6 +4538,13 @@ impl CompilerPass for InstructionSelection {
}
fn finish_function(&mut self, vm: &VM, func: &mut MuFunctionVersion) {
// Todo: Don't emit this if the function never returns
let epilogue_block = format!("{}:{}", self.current_fv_name, EPILOGUE_BLOCK_NAME);
self.start_block(epilogue_block);
self.emit_epilogue(&mut func.context, vm);
self.backend.emit_ret(&LR); // return to the Link Register
self.finish_block();
self.backend.print_cur_code();
let func_name = {
......
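Since finish_function now emits the epilogue exactly once per function, return sites elsewhere can presumably branch to the shared block rather than inline the register restores; a hypothetical lowering of a return instruction under the same naming scheme (not shown in this diff):

// let epilogue = format!("{}:{}", self.current_fv_name, EPILOGUE_BLOCK_NAME);
// self.backend.emit_b(epilogue);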
@@ -1746,7 +1746,7 @@ fn emit_madd_u64(backend: &mut CodeGenerator, dest: &P<Value>, src1: &P<Value>,
let shift = log2(val as u64) as u8;
// dest = (src1 << log2(val)) + src2
if shift <= 4 {
backend.emit_add_ext(&dest, &dest, &src2, false, shift);
backend.emit_add_ext(&dest, &src2, &src1, false, shift);
} else {
backend.emit_lsl_imm(&dest, &src1, shift);
backend.emit_add(&dest, &dest, &src2);
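A worked example of the fix above: for val = 8, shift = 3, and the corrected call computes dest = src2 + (src1 << 3) in a single instruction; the old operand order used the still-uninitialised dest as the base and shifted the wrong source. The shift <= 4 guard exists because ADD (extended register) only permits a left shift of 0-4; larger powers of two take the LSL + ADD path:

// val = 8  (shift = 3):  ADD dest, src2, src1, LSL #3       ; dest = src2 + src1*8
// val = 64 (shift = 6):  LSL dest, src1, #6 ; ADD dest, dest, src2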
@@ -2012,6 +2012,7 @@ fn split_int128(int128: &P<Value>, f_context: &mut FunctionContext, vm: &VM) ->
f_context.get_value_mut(int128.id()).unwrap().set_split(vec![arg_l.clone(), arg_h.clone()]);
trace!("ISAAC <- make temporary ({}, {})", &arg_l, &arg_h);
(arg_l, arg_h)
}
}
@@ -2030,6 +2031,7 @@ pub fn emit_ireg_ex_value(backend: &mut CodeGenerator, pv: &P<Value>, f_context:
emit_mov_u64(backend, &tmp_l, val[0]);
emit_mov_u64(backend, &tmp_h, val[1]);
trace!("ISAAC <- ({}, {}) = ({}, {})", &tmp_l, &tmp_h, val[0], val[1]);
(tmp_l, tmp_h)
},
_ => panic!("expected ireg_ex")
......
@@ -362,8 +362,13 @@ pub fn build_interference_graph_chaitin_briggs(cf: &mut CompiledFunction, func:
};
trace_if!(TRACE_LIVENESS, "Block{}: Inst{}: src={:?}", block, i, src);
let defines = cf.mc().get_inst_reg_defines(i);
for d in defines.iter() {
current_live.insert(*d);
}
// for every definition D in I
for d in cf.mc().get_inst_reg_defines(i) {
for d in defines {
trace_if!(TRACE_LIVENESS, "Block{}: Inst{}: for definition {}",
block, i, func.context.get_temp_display(d));
// add an interference from D to every element E in Current_Live - {D}
......
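The inserted loop adds every definition of the instruction to current_live before any interference edges are drawn, so registers defined simultaneously by one instruction now interfere with each other. As a rough sketch, the backwards scan this implements (standard Chaitin-Briggs construction):

// for each instruction I in the block, scanned bottom-up:
//     live = live ∪ defs(I)              // the fix: co-defined registers interfere
//     for each D in defs(I):
//         for each E in live \ {D}:  add_interference(D, E)
//     live = (live \ defs(I)) ∪ uses(I)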
@@ -125,12 +125,13 @@ impl Inlining {
let n_insts = estimate_insts(&fv);
let out_calls = fv.get_static_call_edges();
let has_throw = fv.has_throw();
let has_tailcall = fv.has_tailcall();
// simple heuristic here:
// * estimated machine insts are fewer than 25 insts
// * leaf in call graph (no out calls)
// * no throw (otherwise we will need to rearrange catch)
let should_inline = n_insts <= 25 && out_calls.len() == 0 && !has_throw;
let should_inline = n_insts <= 25 && out_calls.len() == 0 && !has_throw && !has_tailcall;
trace!("func {} has {} insts (estimated)", callee, n_insts);
trace!(" has {} out calls", out_calls.len());
......
# Copyright 2017 The Australian National University
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@@ -25,7 +25,9 @@ proj_dir = py.path.local(MU_ZEBU) if MU_ZEBU else py.path.local(__file__).join('
test_jit_dir = proj_dir.join('tests', 'test_jit')
testsuite_dir = test_jit_dir.join('suite')
# testsuite_dir = py.path.local('/Users/johnz/Documents/Work/mu-client-pypy/rpython/translator/mu/test_impl')
bin_dir = py.path.local('/tmp')
bin_dir = py.path.local('emit') # put everything under emit
if not bin_dir.exists():
bin_dir.mkdir()
if sys.platform.startswith('darwin'):
libext = '.dylib'
......
@@ -78,16 +78,16 @@ def test_add():
def test_except_stack_args():
compile_bundle(
"""
.funcsig stack_sig = (int<64> int<64> int<64> int<64> int<64> int<64> int<64>)->()
.funcsig stack_sig = (int<64> int<64> int<64> int<64> int<64> int<64> int<64> int<64> int<64>)->()
.funcdef stack_args <stack_sig>
{
entry(<int<64>> v0 <int<64>> v1 <int<64>> v2 <int<64>> v3 <int<64>> v4 <int<64>> v5 <int<64>> v6):
entry(<int<64>> v0 <int<64>> v1 <int<64>> v2 <int<64>> v3 <int<64>> v4 <int<64>> v5 <int<64>> v6 <int<64>> v7 <int<64>> v8):
THROW <ref<void>> NULL
}
.funcdef test_except_stack_args <main_sig>
{
entry(<int<32>>argc <uptr<uptr<char>>>argv):
CALL <stack_sig> stack_args(<int<32>>0 <int<32>>1 <int<32>>2 <int<32>>3 <int<32>>4 <int<32>>5 <int<32>>6)
CALL <stack_sig> stack_args(<int<32>>0 <int<32>>1 <int<32>>2 <int<32>>3 <int<32>>4 <int<32>>5 <int<32>>6 <int<32>>7 <int<32>>8)
EXC (exit(<int<32>> 0) exit(<int<32>> 1))
exit(<int<32>> status):
@@ -95,4 +95,16 @@ def test_except_stack_args():
}
""",
"test_except_stack_args");
assert(execute("test_except_stack_args") == 1);
\ No newline at end of file
assert(execute("test_except_stack_args") == 1);
def test_ldp_bug():
compile_bundle(
"""
.funcdef foo <(int<128> int<128> int<128> int<128> int<128> int<128>)->(int<128>)>
{
entry(<int<128>>a0 <int<128>>a1 <int<128>>a2 <int<128>>a3 <int<128>>a4 <int<128>>a5):
RET a5
}
""", "test_taillcall_smaller_stack");
assert(execute("test_taillcall_smaller_stack") == 12);
\ No newline at end of file
# Copyright 2017 The Australian National University
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.