WARNING! Access to this system is limited to authorised users only.
Unauthorised users may be subject to prosecution.
Unauthorised access to this system is a criminal offence under Australian law (Federal Crimes Act 1914 Part VIA)
It is a criminal offence to:
(1) Obtain access to data without authority. -Penalty 2 years imprisonment.
(2) Damage, delete, alter or insert data without authority. -Penalty 10 years imprisonment.
User activity is monitored and recorded. Anyone using this system expressly consents to such monitoring and recording.

To protect your data, the CISO has advised all users to enable 2FA as soon as possible.
Currently 2.7% of users enabled 2FA.

Commit 917ad50b authored by qinsoon's avatar qinsoon
Browse files

[wip] some tests run. the ones related to stack args and exception

continuations don't work
parent bd61aa2e
......@@ -4648,7 +4648,7 @@ impl<'a> InstructionSelection {
// Throw an exception, don't call the swapee's resumption point
self.backend.emit_b_call(
callsite_label,
"throw_exception_internal".to_string(),
entrypoints::THROW_EXCEPTION_INTERNAL.aot.to_relocatable(),
potentially_excepting,
arg_regs,
ALL_USABLE_MACHINE_REGS.to_vec(),
......
......@@ -3289,15 +3289,11 @@ impl CodeGenerator for ASMCodeGen {
defs: Vec<P<Value>>,
is_native: bool
) -> ValueLocation {
if is_native {
trace!("emit: call /*C*/ {}({:?})", func, uses);
} else {
trace!("emit: call {}({:?})", func, uses);
}
let func = if is_native {
trace!("emit: call /*C*/ {}({:?})", func, uses);
"/*C*/".to_string() + symbol(func).as_str()
} else {
trace!("emit: call {}({:?})", func, uses);
symbol(mangle_name(func))
};
......@@ -3345,6 +3341,54 @@ impl CodeGenerator for ASMCodeGen {
unimplemented!()
}
fn emit_call_jmp(
    &mut self,
    callsite: String,
    func: MuName,
    pe: Option<MuName>,
    uses: Vec<P<Value>>,
    defs: Vec<P<Value>>,
    is_native: bool
) -> ValueLocation {
    // Resolve the assembly-level target name; native (C) targets are tagged
    // with a /*C*/ marker comment and are not Mu-mangled.
    let target = if is_native {
        trace!("emit: call/jmp /*C*/ {}({:?})", func, uses);
        format!("/*C*/{}", symbol(func))
    } else {
        trace!("emit: call/jmp {}({:?})", func, uses);
        symbol(mangle_name(func))
    };

    // Mach-O (macOS) has no PLT, so jump straight at the symbol; on other
    // targets route the jump through the PLT.
    let code = if cfg!(target_os = "macos") {
        format!("/*CALL*/ jmp {}", target)
    } else {
        format!("/*CALL*/ jmp {}@PLT", target)
    };

    // Recorded as a call (uses/defs, possible exceptional edge `pe`) even
    // though the instruction is a jmp and pushes no return address.
    self.add_asm_call(code, pe, uses, defs, None);

    // A global label marks the callsite so it can be located later.
    self.add_asm_global_label(symbol(mangle_name(callsite.clone())));
    ValueLocation::Relocatable(RegGroup::GPR, callsite)
}
fn emit_call_jmp_indirect(
    &mut self,
    callsite: String,
    func: &P<Value>,
    pe: Option<MuName>,
    uses: Vec<P<Value>>,
    defs: Vec<P<Value>>
) -> ValueLocation {
    trace!("emit: call/jmp {}", func);

    // The target address lives in a register; the call instruction therefore
    // also uses that register, which we report to add_asm_call.
    // NOTE(review): the literal 6 looks like an operand offset passed to
    // prepare_reg — confirm against prepare_reg's contract.
    let (target_reg, reg_id, reg_loc) = self.prepare_reg(func, 6);
    self.add_asm_call(
        format!("/*CALL*/ jmp *{}", target_reg),
        pe,
        uses,
        defs,
        Some((reg_id, reg_loc))
    );

    // A global label marks the callsite so it can be located later.
    self.add_asm_global_label(symbol(mangle_name(callsite.clone())));
    ValueLocation::Relocatable(RegGroup::GPR, callsite)
}
fn emit_ret(&mut self) {
trace!("emit: ret");
......
......@@ -250,6 +250,25 @@ pub trait CodeGenerator {
defs: Vec<P<Value>>
) -> ValueLocation;
// sometimes we use jmp as a call (but without pushing return address)
/// Emits a `jmp` to the named symbol `func` that is treated as a call site:
/// a callsite label (`callsite`) is recorded, but no return address is
/// pushed. `pe` is the potentially-excepting destination, `uses`/`defs`
/// describe the values the transfer reads and writes, and `is_native` marks
/// a C-ABI (unmangled) target. Returns the relocatable location of the
/// recorded callsite.
fn emit_call_jmp(
    &mut self,
    callsite: String,
    func: MuName,
    pe: Option<MuName>,
    uses: Vec<P<Value>>,
    defs: Vec<P<Value>>,
    is_native: bool
) -> ValueLocation;
/// Like `emit_call_jmp`, but the jump target is taken indirectly from the
/// operand `func` (a register/value) rather than a named symbol. Records the
/// `callsite` label and returns its relocatable location; no return address
/// is pushed.
fn emit_call_jmp_indirect(
    &mut self,
    callsite: String,
    func: &P<Value>,
    pe: Option<MuName>,
    uses: Vec<P<Value>>,
    defs: Vec<P<Value>>
) -> ValueLocation;
fn emit_ret(&mut self);
// push/pop
......
......@@ -4495,9 +4495,11 @@ impl<'a> InstructionSelection {
if vm.is_doing_jit() {
unimplemented!()
} else {
// reserve space on the stack for the return values of swapstack
self.backend
.emit_sub_r_imm(&x86_64::RSP, res_stack_size as i32);
if res_stack_size != 0 {
// reserve space on the stack for the return values of swapstack
self.backend
.emit_sub_r_imm(&x86_64::RSP, res_stack_size as i32);
}
// get return address (the instruction after the call
let tmp_callsite_addr_loc = self.make_memory_symbolic_normal(
......@@ -4508,7 +4510,7 @@ impl<'a> InstructionSelection {
);
let tmp_callsite = self.make_temporary(f_context, ADDRESS_TYPE.clone(), vm);
self.backend
.emit_mov_r_mem(&tmp_callsite, &tmp_callsite_addr_loc);
.emit_lea_r64(&tmp_callsite, &tmp_callsite_addr_loc);
// push return address
self.backend.emit_push_r64(&tmp_callsite);
......@@ -4603,7 +4605,7 @@ impl<'a> InstructionSelection {
// throws an exception
// we are calling the internal ones as return address and base pointer are already
// on the stack. and also we are saving all usable registers
self.backend.emit_call_near_rel32(
self.backend.emit_call_jmp(
callsite_label.clone(),
entrypoints::THROW_EXCEPTION_INTERNAL.aot.to_relocatable(),
potential_exception_dest,
......@@ -4618,8 +4620,12 @@ impl<'a> InstructionSelection {
// pop resumption address into rax
self.backend.emit_pop_r64(&x86_64::RAX);
// call to the resumption
self.backend.emit_call_near_r64(
// push 0 - a fake return address
// so that SP+8 is 16 bytes aligned (the same requirement as entring a function)
self.backend.emit_push_imm32(0i32);
// jmp to the resumption
self.backend.emit_call_jmp_indirect(
callsite_label.clone(),
&x86_64::RAX,
potential_exception_dest,
......@@ -4646,6 +4652,9 @@ impl<'a> InstructionSelection {
self.start_block(block);
}
// pop the fake return address
self.backend.emit_add_r_imm(&x86_64::RSP, 8);
// unload return values (arguments)
let return_values = res_vals;
let return_tys = return_values.iter().map(|x| x.ty.clone()).collect();
......@@ -4655,8 +4664,10 @@ impl<'a> InstructionSelection {
self.emit_postcall_unload_vals(&return_values, &callconv, f_context, vm);
// collapse return value on stack
self.backend
.emit_add_r_imm(&x86_64::RSP, res_stack_size as i32);
if res_stack_size != 0 {
self.backend
.emit_add_r_imm(&x86_64::RSP, res_stack_size as i32);
}
}
}
......
......@@ -143,7 +143,9 @@ impl MuStack {
// Set up the stack
let mut sp = upper_bound;
sp -= stack_arg_size; // Allocate space for the arguments
// Allocate space for the arguments
sp -= stack_arg_size;
// Push entry as the return address
sp -= POINTER_SIZE;
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment