Commit c6bdd727 authored by qinsoon


[wip] let backend know about exception flow. Also add one more test to check
whether callee-saved registers are restored when catching an exception; the test
fails, so it is marked as ignored.
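In CFG terms, the change treats a potentially-excepting instruction (PEI) like a conditional branch: the exception landing pad becomes one successor edge and the fall-through instruction the other. A self-contained toy model of that edge-building step (all names below are illustrative, not Zebu's actual API):

    // Toy model of how a PEI gets two successor edges, like a conditional branch.
    struct Inst { succs: Vec<usize>, preds: Vec<usize> }

    fn add_pei_edges(insts: &mut Vec<Inst>, pei: usize, exn_target: usize) {
        // edge 1: the exception edge to the landing pad
        insts[pei].succs.push(exn_target);
        insts[exn_target].preds.push(pei);
        // edge 2: the fall-through edge to the next instruction
        let next = pei + 1;
        insts[pei].succs.push(next);
        insts[next].preds.push(pei);
    }

    fn main() {
        let mut insts: Vec<Inst> = (0..4).map(|_| Inst { succs: vec![], preds: vec![] }).collect();
        add_pei_edges(&mut insts, 1, 3); // instruction 1 may throw to instruction 3
        assert_eq!(insts[1].succs, vec![3, 2]);
        println!("PEI successors of inst 1: {:?}", insts[1].succs);
    }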
parent 46a10665
......@@ -2,6 +2,7 @@ use ir::*;
use ptr::*;
use types::*;
use op::*;
use ir_semantics;
use utils::vec_utils;
......@@ -68,6 +69,63 @@ impl Clone for Instruction {
}
impl Instruction {
pub fn has_exception_clause(&self) -> bool {
ir_semantics::is_potentially_excepting_instruction(&self.v)
}
pub fn get_exception_target(&self) -> Option<MuID> {
use inst::Instruction_::*;
match self.v {
Watchpoint {ref resume, ..}
| Call {ref resume, ..}
| CCall {ref resume, ..}
| SwapStack {ref resume, ..}
| ExnInstruction {ref resume, ..} => {
Some(resume.exn_dest.target)
},
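// The remaining variants never transfer control to an exception target.
// Listing them exhaustively instead of using a wildcard arm makes the
// compiler flag this match whenever a new Instruction_ variant is added.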
BinOp(_, _, _)
| BinOpWithStatus(_, _, _, _)
| CmpOp(_, _, _)
| ConvOp{..}
| ExprCall{..}
| ExprCCall{..}
| Load{..}
| Store{..}
| CmpXchg{..}
| AtomicRMW{..}
| New(_)
| AllocA(_)
| NewHybrid(_, _)
| AllocAHybrid(_, _)
| NewStack(_)
| NewThread(_, _)
| NewThreadExn(_, _)
| NewFrameCursor(_)
| GetIRef(_)
| GetFieldIRef{..}
| GetElementIRef{..}
| ShiftIRef{..}
| GetVarPartIRef{..}
| Fence(_)
| Return(_)
| ThreadExit
| Throw(_)
| TailCall(_)
| Branch1(_)
| Branch2{..}
| Select{..}
| WPBranch{..}
| Switch{..}
| CommonInst_GetThreadLocal
| CommonInst_SetThreadLocal(_)
| CommonInst_Pin(_)
| CommonInst_Unpin(_)
| Move(_)
| PrintHex(_) => None
}
}
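// Hypothetical usage sketch (not part of this commit): a pass can now query
// exception flow without matching on Instruction_ itself, e.g.
//     if inst.has_exception_clause() {
//         if let Some(target) = inst.get_exception_target() {
//             // record an exception edge to block `target`
//         }
//     }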
fn debug_str(&self, ops: &Vec<P<TreeNode>>) -> String {
self.v.debug_str(ops)
}
......
......@@ -103,3 +103,53 @@ pub fn has_side_effect(inst: &Instruction_) -> bool {
&PrintHex(_) => true
}
}
pub fn is_potentially_excepting_instruction(inst: &Instruction_) -> bool {
match inst {
&Watchpoint{..}
| &Call{..}
| &CCall{..}
| &SwapStack{..}
| &ExnInstruction{..} => true,
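// every other instruction never raises an exception; this list mirrors the
// partition used by Instruction::get_exception_target in inst.rs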
&BinOp(_, _, _)
| &BinOpWithStatus(_, _, _, _)
| &CmpOp(_, _, _)
| &ConvOp{..}
| &ExprCall{..}
| &ExprCCall{..}
| &Load{..}
| &Store{..}
| &CmpXchg{..}
| &AtomicRMW{..}
| &New(_)
| &AllocA(_)
| &NewHybrid(_, _)
| &AllocAHybrid(_, _)
| &NewStack(_)
| &NewThread(_, _)
| &NewThreadExn(_, _)
| &NewFrameCursor(_)
| &GetIRef(_)
| &GetFieldIRef{..}
| &GetElementIRef{..}
| &ShiftIRef{..}
| &GetVarPartIRef{..}
| &Fence(_)
| &Return(_)
| &ThreadExit
| &Throw(_)
| &TailCall(_)
| &Branch1(_)
| &Branch2{..}
| &Select{..}
| &WPBranch{..}
| &Switch{..}
| &CommonInst_GetThreadLocal
| &CommonInst_SetThreadLocal(_)
| &CommonInst_Pin(_)
| &CommonInst_Unpin(_)
| &Move(_)
| &PrintHex(_) => false
}
}
\ No newline at end of file
......@@ -376,6 +376,35 @@ impl ASMCode {
panic!("conditional branch does not have a fallthrough target");
}
},
ASMBranchTarget::PotentiallyExcepting(ref target) => {
// may trigger an exception and jump to the target - similar to a conditional branch
let target_n = self.blocks.get(target).unwrap().start_inst;
// cur inst's succ is target
asm[i].succs.push(target_n);
if TRACE_CFA {
trace!("inst {}: is potentially excepting to {}", i, target);
trace!("inst {}: excepting target index is {}", i, target_n);
trace!("inst {}: set SUCCS as excepting target {}", i, target_n);
}
asm[target_n].preds.push(i);
if let Some(next_inst) = ASMCode::find_next_inst(i, asm) {
// cur succ is next inst
asm[i].succs.push(next_inst);
// next inst's pred is cur
asm[next_inst].preds.push(i);
if TRACE_CFA {
trace!("inst {}: SET SUCCS as PEI fallthrough target {}", i, next_inst);
}
} else {
panic!("PEI does not have a fallthrough target");
}
},
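// A potentially-excepting instruction therefore ends up with two successor
// edges, just like a conditional branch:
//
//          call foo              (the PEI)
//         /        \
//   next_inst     exn_target
//  (fall-through) (exception landing pad)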
ASMBranchTarget::Return => {
if TRACE_CFA {
trace!("inst {}: is a return", i);
......@@ -436,6 +465,27 @@ impl ASMCode {
}
}
    // search backwards from `i` for the closest line that is a real
    // instruction, skipping symbol lines such as labels and directives
    fn find_last_inst(i: usize, asm: &Vec<ASMInst>) -> Option<usize> {
        if i == 0 {
            return None;
        }
        let mut cur = i;
        // walk backwards; stop after checking index 0
        loop {
            if !asm[cur].is_symbol {
                return Some(cur);
            }
            if cur == 0 {
                return None;
            }
            cur -= 1;
        }
}
fn add_frame_size_patchpoint(&mut self, patchpoint: ASMLocation) {
self.frame_size_patchpoints.push(patchpoint);
}
......@@ -680,6 +730,10 @@ impl MachineCode for ASMCode {
let mut ret = vec![];
for inst in self.code.iter() {
if !inst.is_symbol {
ret.append(&mut "\t".to_string().into_bytes());
}
ret.append(&mut inst.code.clone().into_bytes());
ret.append(&mut "\n".to_string().into_bytes());
}
......@@ -744,6 +798,14 @@ impl MachineCode for ASMCode {
None => None
}
}
fn get_next_inst(&self, index: usize) -> Option<usize> {
ASMCode::find_next_inst(index, &self.code)
}
fn get_last_inst(&self, index: usize) -> Option<usize> {
ASMCode::find_last_inst(index, &self.code)
}
}
#[derive(Clone, Debug)]
......@@ -751,6 +813,7 @@ enum ASMBranchTarget {
None,
Conditional(MuName),
Unconditional(MuName),
PotentiallyExcepting(MuName),
Return
}
......@@ -930,7 +993,7 @@ impl ASMCodeGen {
regs.map(|x| self.prepare_machine_reg(x)).collect()
}
fn add_asm_call(&mut self, code: String) {
fn add_asm_call(&mut self, code: String, potentially_excepting: Option<MuName>) {
// a call instruction will use all the argument registers,
// so we do not need to list individual register uses here
let uses : LinkedHashMap<MuID, Vec<ASMLocation>> = LinkedHashMap::new();
......@@ -960,7 +1023,13 @@ impl ASMCodeGen {
}
}
self.add_asm_inst(code, defines, uses, false);
self.add_asm_inst_internal(code, defines, uses, false, {
    match potentially_excepting {
        Some(target) => ASMBranchTarget::PotentiallyExcepting(target),
        None => ASMBranchTarget::None
    }
}, None)
}
fn add_asm_ret(&mut self, code: String) {
......@@ -2703,11 +2772,11 @@ impl CodeGenerator for ASMCodeGen {
}
#[cfg(target_os = "macos")]
fn emit_call_near_rel32(&mut self, callsite: String, func: MuName) -> ValueLocation {
fn emit_call_near_rel32(&mut self, callsite: String, func: MuName, pe: Option<MuName>) -> ValueLocation {
trace!("emit: call {}", func);
let asm = format!("call {}", symbol(func));
self.add_asm_call(asm);
self.add_asm_call(asm, pe);
let callsite_symbol = symbol(callsite.clone());
self.add_asm_symbolic(directive_globl(callsite_symbol.clone()));
......@@ -2718,13 +2787,13 @@ impl CodeGenerator for ASMCodeGen {
#[cfg(target_os = "linux")]
// generating Position-Independent Code using PLT
fn emit_call_near_rel32(&mut self, callsite: String, func: MuName) -> ValueLocation {
fn emit_call_near_rel32(&mut self, callsite: String, func: MuName, pe: Option<MuName>) -> ValueLocation {
trace!("emit: call {}", func);
let func = func + "@PLT";
let asm = format!("call {}", symbol(func));
self.add_asm_call(asm);
self.add_asm_call(asm, pe);
let callsite_symbol = symbol(callsite.clone());
self.add_asm_symbolic(directive_globl(callsite_symbol.clone()));
......@@ -2733,12 +2802,12 @@ impl CodeGenerator for ASMCodeGen {
ValueLocation::Relocatable(RegGroup::GPR, callsite)
}
fn emit_call_near_r64(&mut self, callsite: String, func: &P<Value>) -> ValueLocation {
fn emit_call_near_r64(&mut self, callsite: String, func: &P<Value>, pe: Option<MuName>) -> ValueLocation {
trace!("emit: call {}", func);
unimplemented!()
}
fn emit_call_near_mem64(&mut self, callsite: String, func: &P<Value>) -> ValueLocation {
fn emit_call_near_mem64(&mut self, callsite: String, func: &P<Value>, pe: Option<MuName>) -> ValueLocation {
trace!("emit: call {}", func);
unimplemented!()
}
......
......@@ -163,9 +163,9 @@ pub trait CodeGenerator {
fn emit_jl(&mut self, dest: MuName);
fn emit_jle(&mut self, dest: MuName);
fn emit_call_near_rel32(&mut self, callsite: String, func: MuName) -> ValueLocation;
fn emit_call_near_r64(&mut self, callsite: String, func: &P<Value>) -> ValueLocation;
fn emit_call_near_mem64(&mut self, callsite: String, func: &P<Value>) -> ValueLocation;
fn emit_call_near_rel32(&mut self, callsite: String, func: MuName, pe: Option<MuName>) -> ValueLocation;
fn emit_call_near_r64 (&mut self, callsite: String, func: &P<Value>, pe: Option<MuName>) -> ValueLocation;
fn emit_call_near_mem64(&mut self, callsite: String, func: &P<Value>, pe: Option<MuName>) -> ValueLocation;
fn emit_ret(&mut self);
......
......@@ -2384,7 +2384,7 @@ impl <'a> InstructionSelection {
unimplemented!()
} else {
let callsite = self.new_callsite_label(cur_node);
self.backend.emit_call_near_rel32(callsite, func_name);
self.backend.emit_call_near_rel32(callsite, func_name, None); // assume the C call will not throw an exception
// record exception block (CCall may have an exception block)
if cur_node.is_some() {
......@@ -2512,6 +2512,16 @@ impl <'a> InstructionSelection {
}
}
let stack_arg_size = self.emit_precall_convention(&arg_values, vm);
// check whether this call has an exception clause - if so, the backend needs to know about it
let potentially_excepting = {
if resumption.is_some() {
let target_id = resumption.unwrap().exn_dest.target;
Some(f_content.get_block(target_id).name().unwrap())
} else {
None
}
};
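// `potentially_excepting` is threaded through emit_call_near_* into
// add_asm_call, which records it as an ASMBranchTarget::PotentiallyExcepting;
// ASMCode's CFG construction then adds the extra successor edge for it.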
trace!("generating call inst");
// check direct call or indirect
......@@ -2525,18 +2535,18 @@ impl <'a> InstructionSelection {
unimplemented!()
} else {
let callsite = self.new_callsite_label(Some(cur_node));
self.backend.emit_call_near_rel32(callsite, target.name().unwrap())
self.backend.emit_call_near_rel32(callsite, target.name().unwrap(), potentially_excepting)
}
} else if self.match_ireg(func) {
let target = self.emit_ireg(func, f_content, f_context, vm);
let callsite = self.new_callsite_label(Some(cur_node));
self.backend.emit_call_near_r64(callsite, &target)
self.backend.emit_call_near_r64(callsite, &target, potentially_excepting)
} else if self.match_mem(func) {
let target = self.emit_mem(func, vm);
let callsite = self.new_callsite_label(Some(cur_node));
self.backend.emit_call_near_mem64(callsite, &target)
self.backend.emit_call_near_mem64(callsite, &target, potentially_excepting)
} else {
unimplemented!()
}
......
......@@ -271,9 +271,15 @@ fn local_liveness_analysis (cf: &mut CompiledFunction) -> LinkedHashMap<String,
None => panic!("cannot find range for block {}", block)
};
trace!("Block {}: start_inst={}, end_inst(inclusive)={}", block, range.start, range.end-1);
start_inst_map.insert(range.start, block);
end_inst_map.insert(range.end - 1, block);
let first_inst = range.start;
// blocks may end with symbol lines (labels/directives) rather than real
// instructions, so ask the machine code for the last real instruction
let last_inst = match mc.get_last_inst(range.end) {
    Some(last) => last,
    None => panic!("cannot find last instruction in block {}; does the block contain no instructions?", block)
};
trace!("Block {}: start_inst={}, end_inst(inclusive)={}", block, first_inst, last_inst);
start_inst_map.insert(first_inst, block);
end_inst_map.insert(last_inst, block);
}
// local liveness analysis
......@@ -324,7 +330,7 @@ fn local_liveness_analysis (cf: &mut CompiledFunction) -> LinkedHashMap<String,
let succs : Vec<String> = {
let mut ret = vec![];
for succ in mc.get_succs(end - 1).into_iter() {
for succ in mc.get_succs(mc.get_last_inst(end).unwrap()).into_iter() {
match start_inst_map.get(succ) {
Some(str) => ret.push(String::from(*str)),
None => {}
......
......@@ -159,6 +159,9 @@ pub trait MachineCode {
fn get_succs(&self, index: usize) -> &Vec<usize>;
fn get_preds(&self, index: usize) -> &Vec<usize>;
fn get_next_inst(&self, index: usize) -> Option<usize>;
fn get_last_inst(&self, index: usize) -> Option<usize>;
fn get_inst_reg_uses(&self, index: usize) -> Vec<MuID>;
fn get_inst_reg_defines(&self, index: usize) -> Vec<MuID>;
......
......@@ -121,6 +121,15 @@ macro_rules! define_block {
body: vec![$($inst), *],
keepalives: None
});
};
(($vm: expr, $fv: ident) $name: ident ($($arg: ident), *) [$exn_arg: ident] {$($inst: ident), *}) => {
$name.content = Some(BlockContent{
args: vec![$($arg.clone_value()), *],
exn_arg: Some($exn_arg.clone_value()),
body: vec![$($inst), *],
keepalives: None
});
}
}
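// Usage sketch for the new `[$exn_arg]` form (hypothetical names; the catch
// block of the new test below is the intended consumer):
//     ssa!          ((vm, fv) <int64> exc_arg);
//     define_block! ((vm, fv) blk_exception () [exc_arg] {
//         blk_exception_ret
//     });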
......@@ -383,7 +392,7 @@ macro_rules! inst {
}
});
};
// CALL
// CALL (one return value)
(($vm: expr, $fv: ident) $name: ident:
$res: ident = CALL ($($op: ident), *) FUNC($func: expr) ($args: expr) $cc: expr,
normal: $norm_dest: ident ($norm_args: expr),
......@@ -411,6 +420,35 @@ macro_rules! inst {
}
});
};
// CALL (no return value)
(($vm: expr, $fv: ident) $name: ident:
CALL ($($op: ident), *) FUNC($func: expr) ($args: expr) $cc: expr,
normal: $norm_dest: ident ($norm_args: expr),
exc: $exc_dest: ident ($exc_args: expr)) => {
let $name = $fv.new_inst(Instruction {
hdr : MuEntityHeader::unnamed($vm.next_id()),
value: None,
ops : RwLock::new(vec![$($op.clone()),*]),
v : Instruction_::Call {
data: CallData {
func: $func,
args: $args,
convention: $cc
},
resume: ResumptionData {
normal_dest: Destination {
target: $norm_dest.id(),
args : $norm_args
},
exn_dest: Destination {
target: $exc_dest.id(),
args : $exc_args
}
}
}
});
};
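// Usage sketch for the no-result form (hypothetical names; `blk_main_call`
// in the new test below is a real use):
//     inst! ((vm, fv) call_inst:
//         CALL (funcref_local, v0) FUNC(0) (vec![]) CallConvention::Mu,
//         normal: blk_norm (vec![]),
//         exc:    blk_exn  (vec![DestArg::Normal(1)])
//     );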
// RET
(($vm: expr, $fv: ident) $name: ident: RET ($($val: ident), +)) => {
......
extern crate log;
extern crate libloading;
extern crate mu;
use self::mu::ast::types::*;
use self::mu::ast::ir::*;
use self::mu::ast::inst::*;
use self::mu::vm::*;
use self::mu::compiler::*;
use self::mu::utils::LinkedHashMap;
use mu::ast::types::*;
use mu::ast::ir::*;
use mu::ast::inst::*;
use mu::ast::op::*;
use mu::vm::*;
use mu::compiler::*;
use mu::utils::LinkedHashMap;
use self::mu::testutil::aot;
use mu::testutil::aot;
use test_compiler::test_call::gen_ccall_exit;
use std::sync::Arc;
use std::sync::RwLock;
......@@ -48,25 +50,19 @@ fn test_exception_throw_catch_simple() {
aot::execute(executable);
}
fn declare_commons(vm: &VM) {
typedef! ((vm) int64 = mu_int(64));
typedef! ((vm) ref_int64 = mu_ref(int64));
typedef! ((vm) iref_int64 = mu_iref(int64));
constdef! ((vm) <int64> int64_0 = Constant::Int(0));
constdef! ((vm) <int64> int64_1 = Constant::Int(1));
}
fn throw_catch_simple() -> VM {
let vm = VM::new();
// .typedef @int64 = int<64>
// .typedef @ref_int64 = ref<int<64>>
// .typedef @iref_int64 = iref<int<64>>
let type_def_int64 = vm.declare_type(vm.next_id(), MuType_::int(64));
vm.set_name(type_def_int64.as_entity(), "int64".to_string());
let type_def_ref_int64 = vm.declare_type(vm.next_id(), MuType_::muref(type_def_int64.clone()));
vm.set_name(type_def_ref_int64.as_entity(), "ref_int64".to_string());
let type_def_iref_int64 = vm.declare_type(vm.next_id(), MuType_::iref(type_def_int64.clone()));
vm.set_name(type_def_iref_int64.as_entity(), "iref_int64".to_string());
// .const @int_64_0 <@int_64> = 0
// .const @int_64_1 <@int_64> = 1
let const_def_int64_0 = vm.declare_const(vm.next_id(), type_def_int64.clone(), Constant::Int(0));
vm.set_name(const_def_int64_0.as_entity(), "int64_0".to_string());
let const_def_int64_1 = vm.declare_const(vm.next_id(), type_def_int64.clone(), Constant::Int(1));
vm.set_name(const_def_int64_1.as_entity(), "int64_1".to_string());
declare_commons(&vm);
create_throw_exception_func(&vm);
create_catch_exception_func(&vm, true);
......@@ -307,25 +303,212 @@ fn test_exception_throw_catch_dont_use_exception_arg() {
fn throw_catch_dont_use_exception_arg() -> VM {
let vm = VM::new();
// .typedef @int64 = int<64>
// .typedef @ref_int64 = ref<int<64>>
// .typedef @iref_int64 = iref<int<64>>
let type_def_int64 = vm.declare_type(vm.next_id(), MuType_::int(64));
vm.set_name(type_def_int64.as_entity(), "int64".to_string());
let type_def_ref_int64 = vm.declare_type(vm.next_id(), MuType_::muref(type_def_int64.clone()));
vm.set_name(type_def_ref_int64.as_entity(), "ref_int64".to_string());
let type_def_iref_int64 = vm.declare_type(vm.next_id(), MuType_::iref(type_def_int64.clone()));
vm.set_name(type_def_iref_int64.as_entity(), "iref_int64".to_string());
// .const @int_64_0 <@int_64> = 0
// .const @int_64_1 <@int_64> = 1
let const_def_int64_0 = vm.declare_const(vm.next_id(), type_def_int64.clone(), Constant::Int(0));
vm.set_name(const_def_int64_0.as_entity(), "int64_0".to_string());
let const_def_int64_1 = vm.declare_const(vm.next_id(), type_def_int64.clone(), Constant::Int(1));
vm.set_name(const_def_int64_1.as_entity(), "int64_1".to_string());
declare_commons(&vm);
create_throw_exception_func(&vm);
create_catch_exception_func(&vm, false);
vm
}
#[test]
#[ignore]
// issue: callee-saved registers are not restored correctly when catching an exception, so this test is temporarily ignored
// FIXME: fix the bug
fn test_exception_throw_catch_and_add() {
VM::start_logging_trace();
let vm = Arc::new(throw_catch_and_add());
let compiler = Compiler::new(CompilerPolicy::default(), &vm);
let func_throw = vm.id_of("throw_exception");
let func_catch = vm.id_of("catch_and_add");
{
let funcs = vm.funcs().read().unwrap();
let func_vers = vm.func_vers().read().unwrap();
{
let func = funcs.get(&func_throw).unwrap().read().unwrap();
let mut func_ver = func_vers.get(&func.cur_ver.unwrap()).unwrap().write().unwrap();
compiler.compile(&mut func_ver);
}
{
let func = funcs.get(&func_catch).unwrap().read().unwrap();
let mut func_ver = func_vers.get(&func.cur_ver.unwrap()).unwrap().write().unwrap();
compiler.compile(&mut func_ver);
}
}
vm.make_primordial_thread(func_catch, true, vec![]);
backend::emit_context(&vm);
let executable = aot::link_primordial(vec![Mu("throw_exception"), Mu("catch_and_add")], "throw_catch_and_add", &vm);
let output = aot::execute_nocheck(executable);
// the callee throws 1 and the catch block adds 0, 1, 2, 3 and 4: 1 + 0 + 1 + 2 + 3 + 4 = 11 (the exit code)
assert!(output.status.code().is_some());
assert_eq!(output.status.code().unwrap(), 11);
}
fn throw_catch_and_add() -> VM {
let vm = VM::new();
declare_commons(&vm);
create_throw_exception_func(&vm);
create_catch_exception_and_add(&vm);
vm
}
fn create_catch_exception_and_add(vm: &VM) {
let throw_exception_sig = vm.get_func_sig(vm.id_of("throw_exception_sig"));
let throw_exception_id = vm.id_of("throw_exception");
let int64 = vm.get_type(vm.id_of("int64"));
constdef! ((vm) <int64> int64_0 = Constant::Int(0));
constdef! ((vm) <int64> int64_1 = Constant::Int(1));
constdef! ((vm) <int64> int64_2 = Constant::Int(2));
constdef! ((vm) <int64> int64_3 = Constant::Int(3));
constdef! ((vm) <int64> int64_4 = Constant::Int(4));
constdef! ((vm) <int64> int64_5 = Constant::Int(5));
typedef! ((vm) type_funcref_throw_exception = mu_funcref(throw_exception_sig));
constdef! ((vm) <type_funcref_throw_exception> const_funcref_throw_exception = Constant::FuncRef(throw_exception_id));
funcsig! ((vm) catch_exception_sig = () -> ());
funcdecl! ((vm) <catch_exception_sig> catch_and_add);
funcdef! ((vm) <catch_exception_sig> catch_and_add VERSION catch_and_add_v1);
// blk_entry
consta! ((vm, catch_and_add_v1) int0_local = int64_0);
consta! ((vm, catch_and_add_v1) int1_local = int64_1);
consta! ((vm, catch_and_add_v1) int2_local = int64_2);
consta! ((vm, catch_and_add_v1) int3_local = int64_3);
consta! ((vm, catch_and_add_v1) int4_local = int64_4);
block! ((vm, catch_and_add_v1) blk_entry);
block! ((vm, catch_and_add_v1) blk_main);
inst! ((vm, catch_and_add_v1) blk_entry_branch:
BRANCH blk_main (int0_local, int1_local, int2_local, int3_local, int4_local)
);
define_block! ((vm, catch_and_add_v1) blk_entry () {
blk_entry_branch
});
ssa! ((vm, catch_and_add_v1) <int64> v0);
ssa! ((vm, catch_and_add_v1) <int64> v1);
ssa! ((vm, catch_and_add_v1) <int64> v2);
ssa! ((vm, catch_and_add_v1) <int64> v3);
ssa! ((vm, catch_and_add_v1) <int64> v4);
// blk_main
consta! ((vm, catch_and_add_v1) funcref_throw_local = const_funcref_throw_exception);
block! ((vm, catch_and_add_v1) blk_normal);
block! ((vm, catch_and_add_v1) blk_exception);
inst! ((vm, catch_and_add_v1) blk_main_call:
CALL (funcref_throw_local, v0, v1, v2, v3, v4) FUNC(0) (vec![]) CallConvention::Mu,
normal: blk_normal (vec![]),
exc : blk_exception (vec![
DestArg::Normal(1),
DestArg::Normal(2),
DestArg::Normal(3),
DestArg::Normal(4),
DestArg::Normal(5),
])
);
define_block! ((vm, catch_and_add_v1) blk_main(v0, v1, v2, v3, v4) {
blk_main_call
});
// blk_normal
inst! ((vm, catch_and_add_v1) blk_normal_threadexit:
THREADEXIT
);
define_block! ((vm, catch_and_add_v1) blk_normal() {
blk_normal_threadexit
});
// blk_exception
ssa! ((vm, catch_and_add_v1) <int64> ev0);
ssa! ((vm, catch_and_add_v1) <int64> ev1);
ssa! ((vm, catch_and_add_v1) <int64> ev2);
ssa! ((vm, catch_and_add_v1) <int64> ev3);
ssa! ((vm, catch_and_add_v1) <int64> ev4);
ssa! ((vm, catch_and_add_v1) <int64> exc_arg);
inst! ((vm, catch_and_add_v1) blk_exception_px0:
PRINTHEX ev0
);
inst! ((vm, catch_and_add_v1) blk_exception_px1:
PRINTHEX ev1
);
inst! ((vm, catch_and_add_v1) blk_exception_px2:
PRINTHEX ev2
);
inst! ((vm, catch_and_add_v1) blk_exception_px3:
PRINTHEX ev3
);
inst! ((vm, catch_and_add_v1) blk_exception_px4:
PRINTHEX ev4
);
inst! ((vm, catch_and_add_v1) blk_exception_px5:
PRINTHEX exc_arg
);
ssa! ((vm, catch_and_add_v1) <int64> res0);
inst! ((vm, catch_and_add_v1) blk_exception_add0:
res0 = BINOP (BinOp::Add) exc_arg ev0
);
ssa! ((vm, catch_and_add_v1) <int64> res1);
inst! ((vm, catch_and_add_v1) blk_exception_add1:
res1 = BINOP (BinOp::Add) res0 ev1
);
ssa! ((vm, catch_and_add_v1) <int64> res2);