Commit 856903b6 authored by Isaac Oscar Gariano

Improved swapstack implementation (WIP)

parent c24b8896
@@ -176,8 +176,7 @@ impl Instruction {
CommonInst_GetAddr(_) |
PrintHex(_) |
SetRetval(_) |
KillStack(_) |
CurrentStack => true,
KillStack(_) => true,
BinOp(_, _, _) |
BinOpWithStatus(_, _, _, _) |
CmpOp(_, _, _) |
@@ -198,7 +197,8 @@ impl Instruction {
CommonInst_Tr64ToInt(_) |
CommonInst_Tr64ToRef(_) |
CommonInst_Tr64ToTag(_) |
Move(_) => false
Move(_) |
CurrentStack => false
}
}
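This match groups instruction variants by a boolean property (from context it reads like Zebu's has_side_effect-style predicate, an assumption, not confirmed by the diff); merely reading the current stackref mutates nothing, so CurrentStack moves from the true arms to the false arms. A runnable sketch of the grouped-arm idiom, with stand-in variants:

// Toy version of the grouped-arm predicate above; variants are stand-ins.
#[allow(dead_code)]
enum Inst {
    SetRetval,
    KillStack,
    CurrentStack,
    Move,
}

fn has_side_effect(i: &Inst) -> bool {
    use Inst::*;
    match i {
        SetRetval | KillStack => true, // observable effects
        CurrentStack | Move => false,  // pure reads/copies
    }
}

fn main() {
    assert!(has_side_effect(&Inst::KillStack));
    assert!(!has_side_effect(&Inst::CurrentStack));
}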
......
@@ -1037,52 +1037,6 @@ impl ASMCodeGen {
self.cur_mut().code.push(ASMInst::symbolic(code));
}
fn add_asm_call(
&mut self,
code: String,
potentially_excepting: Option<MuName>,
arguments: Vec<P<Value>>,
target: Option<(MuID, ASMLocation)>
) {
// a call instruction will use all the argument registers
// do not need
let mut uses: LinkedHashMap<MuID, Vec<ASMLocation>> = LinkedHashMap::new();
if target.is_some() {
let (id, loc) = target.unwrap();
uses.insert(id, vec![loc]);
}
for arg in arguments {
uses.insert(arg.id(), vec![]);
}
let mut defines: LinkedHashMap<MuID, Vec<ASMLocation>> = LinkedHashMap::new();
for reg in CALLER_SAVED_GPRS.iter() {
if !defines.contains_key(&reg.id()) {
defines.insert(reg.id(), vec![]);
}
}
for reg in CALLER_SAVED_FPRS.iter() {
if !defines.contains_key(&reg.id()) {
defines.insert(reg.id(), vec![]);
}
}
self.add_asm_inst_internal(
code,
defines,
uses,
false,
{
if potentially_excepting.is_some() {
ASMBranchTarget::PotentiallyExcepting(potentially_excepting.unwrap())
} else {
ASMBranchTarget::None
}
},
None
)
}
fn add_asm_inst(
&mut self,
code: String,
@@ -2162,6 +2116,48 @@ impl ASMCodeGen {
self.add_asm_inst(asm, ignore_zero_register(id1, vec![loc1]), uses, false)
}
fn internal_call(
&mut self,
callsite: Option<String>,
code: String,
pe: Option<MuName>,
args: Vec<P<Value>>,
ret: Vec<P<Value>>,
target: Option<(MuID, ASMLocation)>,
may_return: bool
) -> Option<ValueLocation> {
let mut uses: LinkedHashMap<MuID, Vec<ASMLocation>> = LinkedHashMap::new();
if target.is_some() {
let (id, loc) = target.unwrap();
uses.insert(id, vec![loc]);
}
for arg in args {
uses.insert(arg.id(), vec![]);
}
let mut defines: LinkedHashMap<MuID, Vec<ASMLocation>> = LinkedHashMap::new();
for ret in ret.iter() {
defines.insert(ret.id(), vec![]);
}
self.add_asm_inst_internal(
code,
defines,
uses,
false,
{
if pe.is_some() {
ASMBranchTarget::PotentiallyExcepting(pe.unwrap())
} else if may_return {
ASMBranchTarget::None
} else {
ASMBranchTarget::Return
}
},
None
);
if callsite.is_some() {
let callsite_symbol = mangle_name(callsite.as_ref().unwrap().clone());
self.add_asm_symbolic(directive_globl(callsite_symbol.clone()));
self.add_asm_symbolic(format!("{}:", callsite_symbol.clone()));
Some(ValueLocation::Relocatable(RegGroup::GPR, callsite.unwrap()))
} else {
None
}
}
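internal_call centralises the branch-target decision that emit_bl, emit_blr, and the new tail-call emitters share: an exception-capable callsite takes priority, an ordinary call falls through, and a call that cannot come back (may_return == false) is classified like a return. A runnable toy of that decision, with String standing in for MuName:

// Simplified stand-in for ASMBranchTarget and the selection logic in
// `internal_call`; the real types live in the Zebu backend.
#[derive(Debug, PartialEq)]
enum BranchTarget {
    PotentiallyExcepting(String), // call may unwind to this block
    None,                         // ordinary call: control falls through
    Return,                       // call never returns here (tail call, swapstack)
}

fn classify(pe: Option<String>, may_return: bool) -> BranchTarget {
    match pe {
        Some(block) => BranchTarget::PotentiallyExcepting(block),
        None if may_return => BranchTarget::None,
        None => BranchTarget::Return,
    }
}

fn main() {
    assert_eq!(
        classify(Some("catch0".into()), true),
        BranchTarget::PotentiallyExcepting("catch0".into())
    );
    assert_eq!(classify(None, true), BranchTarget::None);
    assert_eq!(classify(None, false), BranchTarget::Return);
}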
fn emit_ldr_spill(&mut self, dest: Reg, src: Mem) {
self.internal_load("LDR", dest, src, false, true, false);
}
@@ -2395,12 +2391,13 @@ impl CodeGenerator for ASMCodeGen {
fn emit_bl(
&mut self,
callsite: String,
callsite: Option<String>,
func: MuName,
pe: Option<MuName>,
args: Vec<P<Value>>,
ret: Vec<P<Value>>,
is_native: bool
) -> ValueLocation {
) -> Option<ValueLocation> {
if is_native {
trace_emit!("\tBL /*C*/ {}({:?})", func, args);
} else {
@@ -2413,34 +2410,27 @@ impl CodeGenerator for ASMCodeGen {
mangle_name(func)
};
let mut ret = ret;
ret.push(LR.clone());
let asm = format!("BL {}", func);
self.add_asm_call(asm, pe, args, None);
let callsite_symbol = mangle_name(callsite.clone());
self.add_asm_symbolic(directive_globl(callsite_symbol.clone()));
self.add_asm_symbolic(format!("{}:", callsite_symbol.clone()));
ValueLocation::Relocatable(RegGroup::GPR, callsite)
self.internal_call(callsite, asm, pe, args, ret, None, true)
}
fn emit_blr(
&mut self,
callsite: String,
callsite: Option<String>,
func: Reg,
pe: Option<MuName>,
args: Vec<P<Value>>
) -> ValueLocation {
args: Vec<P<Value>>,
ret: Vec<P<Value>>
) -> Option<ValueLocation> {
trace_emit!("\tBLR {}({:?})", func, args);
let mut ret = ret;
ret.push(LR.clone());
let (reg1, id1, loc1) = self.prepare_reg(func, 3 + 1);
let asm = format!("BLR {}", reg1);
self.add_asm_call(asm, pe, args, Some((id1, loc1)));
let callsite_symbol = mangle_name(callsite.clone());
self.add_asm_symbolic(directive_globl(callsite_symbol.clone()));
self.add_asm_symbolic(format!("{}:", callsite_symbol.clone()));
ValueLocation::Relocatable(RegGroup::GPR, callsite)
self.internal_call(callsite, asm, pe, args, ret, Some((id1, loc1)), true)
}
@@ -2458,23 +2448,35 @@ impl CodeGenerator for ASMCodeGen {
None
);
}
fn emit_b_func(&mut self, func_name: MuName, args: Vec<P<Value>>) {
trace_emit!("\tB {}({:?})", func_name, args);
let asm = format!("/*TAILCALL*/ B {}", mangle_name(func_name.clone()));
let mut uses: LinkedHashMap<MuID, Vec<ASMLocation>> = LinkedHashMap::new();
for arg in args {
uses.insert(arg.id(), vec![]);
}
self.add_asm_inst_internal(
asm,
linked_hashmap!{},
uses,
false,
ASMBranchTarget::Return,
None
);
}
fn emit_b_call(
&mut self,
callsite: Option<String>,
func: MuName,
pe: Option<MuName>,
args: Vec<P<Value>>,
ret: Vec<P<Value>>,
is_native: bool,
may_return: bool
) -> Option<ValueLocation> {
if is_native {
trace_emit!("\tB /*C*/ {}({:?})", func, args);
} else {
trace_emit!("\tB {}({:?})", func, args);
}
let func = if is_native {
"/*C*/".to_string() + func.as_str()
} else {
mangle_name(func)
};
let mut ret = ret;
ret.push(LR.clone());
let asm = format!("B {}", func);
self.internal_call(callsite, asm, pe, args, ret, None, may_return)
}
fn emit_b_cond(&mut self, cond: &str, dest_name: MuName) {
trace_emit!("\tB.{} {}", cond, dest_name);
@@ -2503,35 +2505,25 @@ impl CodeGenerator for ASMCodeGen {
None
);
}
fn emit_br_func(&mut self, func_address: Reg, args: Vec<P<Value>>) {
trace_emit!("\tBR {}({:?})", func_address, args);
let (reg1, id1, loc1) = self.prepare_reg(func_address, 2 + 1);
let asm = format!("/*TAILCALL*/ BR {}", reg1);
let mut added_id1 = false;
let mut uses: LinkedHashMap<MuID, Vec<ASMLocation>> = LinkedHashMap::new();
for arg in args {
if arg.id() == id1 {
uses.insert(arg.id(), vec![loc1.clone()]);
added_id1 = true;
} else {
uses.insert(arg.id(), vec![]);
}
}
if !added_id1 {
uses.insert(id1, vec![loc1]);
}
self.add_asm_inst_internal(
asm,
linked_hashmap!{},
uses,
false,
ASMBranchTarget::Return,
None
);
}
fn emit_br_call(
&mut self,
callsite: Option<String>,
func: Reg,
pe: Option<MuName>,
args: Vec<P<Value>>,
ret: Vec<P<Value>>,
may_return: bool
) -> Option<ValueLocation> {
trace_emit!("\tBR {}({:?})", func, args);
let mut ret = ret;
ret.push(LR.clone());
let (reg1, id1, loc1) = self.prepare_reg(func, 3 + 1);
let asm = format!("BR {}", reg1);
self.internal_call(callsite, asm, pe, args, ret, Some((id1, loc1)), may_return)
}
fn emit_cbnz(&mut self, src: Reg, dest_name: MuName) {
self.internal_branch_op("CBNZ", src, dest_name);
}
......
@@ -120,26 +120,45 @@ pub trait CodeGenerator {
// Calls
fn emit_bl(
&mut self,
callsite: String,
callsite: Option<String>,
func: MuName,
pe: Option<MuName>,
args: Vec<P<Value>>,
ret: Vec<P<Value>>,
is_native: bool
) -> ValueLocation;
) -> Option<ValueLocation>;
fn emit_blr(
&mut self,
callsite: String,
callsite: Option<String>,
func: Reg,
pe: Option<MuName>,
args: Vec<P<Value>>
) -> ValueLocation;
// Branches
args: Vec<P<Value>>,
ret: Vec<P<Value>>,
) -> Option<ValueLocation>;
// Branches
fn emit_b(&mut self, dest_name: MuName);
fn emit_b_func(&mut self, func: MuName, args: Vec<P<Value>>); // For tail calls
fn emit_b_cond(&mut self, cond: &str, dest_name: MuName);
fn emit_br(&mut self, dest_address: Reg);
fn emit_br_func(&mut self, func_address: Reg, args: Vec<P<Value>>); // For tail calls
fn emit_b_call(
&mut self,
callsite: Option<String>,
func: MuName,
pe: Option<MuName>,
args: Vec<P<Value>>,
ret: Vec<P<Value>>,
is_native: bool,
may_return: bool
) -> Option<ValueLocation>;
fn emit_br_call(
&mut self,
callsite: Option<String>,
func: Reg,
pe: Option<MuName>,
args: Vec<P<Value>>,
ret: Vec<P<Value>>,
may_return: bool
) -> Option<ValueLocation>;
fn emit_ret(&mut self, src: Reg);
fn emit_cbnz(&mut self, src: Reg, dest_name: MuName);
......
@@ -674,6 +674,54 @@ lazy_static! {
D15.clone()
];
pub static ref CALLER_SAVED_REGS : [P<Value>; 42] = [
X0.clone(),
X1.clone(),
X2.clone(),
X3.clone(),
X4.clone(),
X5.clone(),
X6.clone(),
X7.clone(),
X8.clone(),
X9.clone(),
X10.clone(),
X11.clone(),
X12.clone(),
X13.clone(),
X14.clone(),
X15.clone(),
X16.clone(),
X17.clone(),
//X18.clone(), // Platform Register
D0.clone(),
D1.clone(),
D2.clone(),
D3.clone(),
D4.clone(),
D5.clone(),
D6.clone(),
D7.clone(),
D16.clone(),
D17.clone(),
D18.clone(),
D19.clone(),
D20.clone(),
D21.clone(),
D22.clone(),
D23.clone(),
D24.clone(),
D25.clone(),
D26.clone(),
D27.clone(),
D28.clone(),
D29.clone(),
D30.clone(),
D31.clone()
];
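The new array is the full AArch64 caller-saved set: X0 through X17 (X18 is excluded as the platform register) plus D0 through D7 and D16 through D31. A quick sanity check that the counts match the declared [P<Value>; 42] length:

fn main() {
    let gprs = 18;     // X0..=X17; X18 is reserved as the platform register
    let fprs = 8 + 16; // D0..=D7 plus D16..=D31 (D8..=D15 are callee-saved)
    assert_eq!(gprs + fprs, 42); // matches CALLER_SAVED_REGS: [P<Value>; 42]
    println!("caller-saved registers: {}", gprs + fprs);
}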
pub static ref ALL_USABLE_GPRS : Vec<P<Value>> = vec![
X0.clone(),
X1.clone(),
......
@@ -67,8 +67,7 @@ lazy_static! {
String::from("muentry_thread_exit")),
jit: RwLock::new(None),
};
}
lazy_static! {
// impl/decl: thread.rs
pub static ref NEW_STACK: RuntimeEntrypoint = RuntimeEntrypoint {
sig: P(MuFuncSig{
@@ -180,6 +179,16 @@ lazy_static! {
jit: RwLock::new(None),
};
pub static ref THROW_EXCEPTION_INTERNAL : RuntimeEntrypoint = RuntimeEntrypoint {
sig: P(MuFuncSig {
hdr: MuEntityHeader::unnamed(ir::new_internal_id()),
ret_tys: vec![],
arg_tys: vec![ADDRESS_TYPE.clone(), ADDRESS_TYPE.clone()]
}),
aot: ValueLocation::Relocatable(RegGroup::GPR, String::from("muentry_throw_exception_internal")),
jit: RwLock::new(None),
};
// impl/decl: math.rs
pub static ref FREM32 : RuntimeEntrypoint = RuntimeEntrypoint {
sig: P(MuFuncSig{
@@ -342,48 +351,4 @@ lazy_static! {
aot: ValueLocation::Relocatable(RegGroup::GPR, String::from("muentry_mem_zero")),
jit: RwLock::new(None)
};
// impl/decl: thread.rs
pub static ref PREPARE_SWAPSTACK_RET : RuntimeEntrypoint = RuntimeEntrypoint {
sig: P(MuFuncSig {
hdr: MuEntityHeader::unnamed(ir::new_internal_id()),
ret_tys: vec![ADDRESS_TYPE.clone(), ADDRESS_TYPE.clone()],
arg_tys: vec![STACKREF_TYPE.clone()]
}),
aot: ValueLocation::Relocatable(RegGroup::GPR, String::from("muentry_prepare_swapstack_ret")),
jit: RwLock::new(None)
};
// impl/decl: thread.rs
pub static ref PREPARE_SWAPSTACK_KILL : RuntimeEntrypoint = RuntimeEntrypoint {
sig: P(MuFuncSig {
hdr: MuEntityHeader::unnamed(ir::new_internal_id()),
ret_tys: vec![ADDRESS_TYPE.clone(), STACKREF_TYPE.clone()],
arg_tys: vec![STACKREF_TYPE.clone()]
}),
aot: ValueLocation::Relocatable(RegGroup::GPR, String::from("muentry_prepare_swapstack_kill")),
jit: RwLock::new(None)
};
// impl/decl: thread.rs
pub static ref SWAPSTACK_RET_THROW : RuntimeEntrypoint = RuntimeEntrypoint {
sig: P(MuFuncSig {
hdr: MuEntityHeader::unnamed(ir::new_internal_id()),
ret_tys: vec![],
arg_tys: vec![REF_VOID_TYPE.clone(), ADDRESS_TYPE.clone(), ADDRESS_TYPE.clone()]
}),
aot: ValueLocation::Relocatable(RegGroup::GPR, String::from("muentry_swapstack_ret_throw")),
jit: RwLock::new(None)
};
// impl/decl: thread.rs
pub static ref SWAPSTACK_KILL_THROW : RuntimeEntrypoint = RuntimeEntrypoint {
sig: P(MuFuncSig {
hdr: MuEntityHeader::unnamed(ir::new_internal_id()),
ret_tys: vec![],
arg_tys: vec![REF_VOID_TYPE.clone(), ADDRESS_TYPE.clone(), STACKREF_TYPE.clone()]
}),
aot: ValueLocation::Relocatable(RegGroup::GPR, String::from("muentry_swapstack_kill_throw")),
jit: RwLock::new(None)
};
}
@@ -90,7 +90,6 @@ pub extern "C" fn throw_exception_internal(exception_obj: Address, frame_cursor:
};
// Check for a catch block at this callsite
// (there won't be one on the first iteration of this loop)
if callsite_info.exceptional_destination.is_some() {
catch_address = callsite_info.exceptional_destination.unwrap();
trace!("Found catch block: 0x{:x}", catch_address);
......
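The unwinding loop above asks, for each callsite it walks past, whether an exceptional destination was registered (as the comment notes, the first iteration never has one). A toy version of that lookup, assuming a plain map from return addresses to optional catch addresses; the real table is built by the compiler:

use std::collections::HashMap;

// Toy stand-in for the per-callsite exception table consulted while unwinding.
fn find_catch(table: &HashMap<usize, Option<usize>>, callsite: usize) -> Option<usize> {
    table.get(&callsite).copied().flatten()
}

fn main() {
    let mut table = HashMap::new();
    table.insert(0x1000, None);         // frame where the throw happened: no catch
    table.insert(0x2000, Some(0x2040)); // a caller with a catch block
    assert_eq!(find_catch(&table, 0x1000), None);
    assert_eq!(find_catch(&table, 0x2000), Some(0x2040));
}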
@@ -45,59 +45,46 @@ begin_func exception_restore
BR X0
end_func exception_restore
# Swapstack internals
.macro stack_pass
# On the new stack, reverse the above
pop_callee_saved
exit_frame
RET
.endm

.macro stack_throw
# The new stack will have the same layout as the stack when muentry_throw_exception
# calls throw_exception_internal, so we can do that directly here
# Add the total size pushed by 'push_callee_saved' to get the FP for the new stack
ADD FP, SP, 144
MOV X1, FP
BL throw_exception_internal
.endm

.macro stack_ret old_sp
MOV X11, SP
STR X11, [\old_sp]
.endm

.macro stack_kill old_stack
mov_args_to_callee_saved
MOV X0, \old_stack
BL muentry_kill_stack
mov_callee_saved_to_args
.endm

# starts a muthread that passes values to the target (with the given memory area containing values of argument registers)
# (new_sp: Address, old_sp_loc: Address)
#  X0              X1
begin_func muthread_start_pass
stack_ret X1
MOV X9, X0 # X1 will be overridden by the next instructions
load_arguments X9
MOV SP, X9
stack_pass
end_func muthread_start_pass

# Same as muentry_swapstack_ret_throw
# muthread_start_throw(exception: Address, new_sp: Address, old_sp_loc: &mut Address)
#  X0                  X1               X2
begin_func muthread_start_throw
stack_ret X2
MOV SP, X1
stack_throw
end_func muthread_start_throw

# starts a muthread that passes values to the target
# muthread_start_normal(new_sp: Address, old_sp_loc: Address)
#  X0               X1
begin_func muthread_start_normal
enter_frame
push_callee_saved
// Save the current SP to old_sp_loc
MOV X11, SP
STR X11, [X1]
// Swap to the new stack
MOV SP, X0
// Load the argument registers from the new stack
load_arguments SP
// Jump to the new stack
exit_frame
BR LR
end_func muthread_start_normal

# starts a muthread that throws an exception to the target
begin_func muthread_start_exceptional
enter_frame
push_callee_saved
// Save the current SP to old_sp_loc
MOV X11, SP
STR X11, [X1]
// Swap to the new stack
MOV SP, X1
SUB SP, SP, #144 // Allocate space for callee saved registers
B throw_exception_internal
// We won't be coming back...
end_func muthread_start_exceptional
# restores the thread
# (new_sp: Address)
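The replacement for the stack_* macros is deliberately symmetric: muthread_start_normal pushes the callee-saved registers, publishes the old SP through old_sp_loc, installs the new SP, and the resume path pops and returns in reverse. A toy model of just the SP handshake (addresses invented for illustration):

// Toy model of the swapstack SP handshake done in assembly above:
// publish where we stopped, then resume the target from its saved SP.
fn swap(old_sp_loc: &mut usize, current_sp: usize, new_sp: usize) -> usize {
    *old_sp_loc = current_sp; // STR X11, [old_sp_loc]
    new_sp                    // MOV SP, new_sp
}

fn main() {
    let mut saved = 0usize;
    let resumed_at = swap(&mut saved, 0x7fff_0000, 0x7ffe_8000);
    assert_eq!(saved, 0x7fff_0000); // the old stack can be swapped back to later
    println!("resuming at sp = {:#x}", resumed_at);
}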
@@ -105,41 +92,9 @@ end_func muthread_start_throw
begin_func muentry_thread_exit
# Rust code will be responsible for actually killing the stack
MOV SP, X0
stack_pass
end_func muentry_thread_exit
# swap to the new stack whilst passing values and saving the old stack
# muentry_swapstack_ret_pass(new_stack args..., new_sp: Address, old_sp_loc: &mut Address)
# X0 ... X7 [x8] X9 X10
begin_func muentry_swapstack_ret_pass
stack_ret X10
MOV SP, X9
stack_pass
end_func muentry_swapstack_ret_pass
# Same as swapstack_ret_pass except will throw an exception to the new stack instead of passing values
# muentry_swapstack_ret_throw(exception: Address, new_sp: Address, old_sp_loc: &mut Address)
# X0 X1 X2
begin_func muentry_swapstack_ret_throw
stack_ret X2
MOV SP, X1
stack_throw
end_func muentry_swapstack_ret_throw
# swap to the new stack whilst passing values and killing the old stack
# muentry_swapstack_kill_pass(new_stack args..., new_sp: Address, old_stack: *mut MuStack)
# X0 ... X7 [x8] X9 X10
begin_func muentry_swapstack_kill_pass
MOV SP, X9
stack_kill X10
stack_pass
end_func muentry_swapstack_kill_pass
# Same as muentry_swapstack_kill_pass except will throw an exception to the new stack instead of passing values
# muentry_swapstack_kill_throw(exception: Address, new_sp: Address, old_stack: *mut MuStack)
# X0 X1 X2
begin_func muentry_swapstack_kill_throw
MOV SP, X1
stack_kill X2
stack_throw
end_func muentry_swapstack_kill_throw
// Do the inverse of 'muthread_*'
pop_callee_saved
exit_frame
BR LR
end_func muentry_thread_exit
\ No newline at end of file
@@ -18,13 +18,11 @@ use ast::types::*;
use vm::VM;
use runtime::ValueLocation;
use runtime::mm;
use compiler::backend::CALLEE_SAVED_COUNT;
use utils::ByteSize;
use utils::Address;
use utils::Word;
use utils::POINTER_SIZE;
use utils::WORD_SIZE;
use utils::mem::memmap;
use utils::mem::memsec;
@@ -70,9 +68,6 @@ impl_mu_entity!(MuStack);
pub struct MuStack {
pub hdr: MuEntityHeader,
/// entry function for the stack, represented as (address, func id)
func: Option<(ValueLocation, MuID)>,
/// stack size
size: ByteSize,
@@ -106,7 +101,10 @@ pub struct MuStack {
#[allow(dead_code)]
mmap: Option<memmap::Mmap>
}
lazy_static!{
pub static ref MUSTACK_SP_OFFSET : usize =
offset_of!(MuStack=>sp).get_byte_offset();
}
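MUSTACK_SP_OFFSET exposes the byte offset of MuStack's saved sp so generated code can reach the field by raw offset instead of calling into Rust. The commit uses the field-offset crate's offset_of!(MuStack=>sp); the sketch below shows the same idea with the standard library's offset_of! (stable since Rust 1.77) and a stand-in struct:

use std::mem::offset_of;

// Stand-in for MuStack; only the shape matters here.
#[allow(dead_code)]
struct Stack {
    id: u64,
    sp: usize, // saved stack pointer, what MUSTACK_SP_OFFSET locates
}

fn main() {
    let sp_offset = offset_of!(Stack, sp);
    println!("sp lives {} bytes into the struct", sp_offset);
}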
impl MuStack {
/// creates a new MuStack for given entry function and function address
pub fn new(id: MuID, func_addr: Address, stack_arg_size: usize) -> MuStack {
@@ -155,9 +153,6 @@ impl MuStack {
sp -= POINTER_SIZE;
unsafe { sp.store(Address::zero()); }
// Reserve space for callee saved registers (they will be loaded with undefined values)
sp -= WORD_SIZE*CALLEE_SAVED_COUNT;
debug!("creating stack {} with entry address {:?}", id, func_addr);
debug!("overflow_guard : {}", overflow_guard);
debug!("lower_bound : {}", lower_bound);
@@ -167,7 +162,6 @@
MuStack {
hdr: MuEntityHeader::unnamed(id),
func: None,
state: MuStackState::Unknown,
size: STACK_SIZE,
@@ -308,7 +302,7 @@ pub struct MuThread {
/// the allocator from memory manager
pub allocator: mm::Mutator,
/// current stack (a thread can execute different stacks, but one stack at a time)
pub stack: Option<Box<MuStack>>,
pub stack: *mut MuStack,
/// native stack pointer before we switch to this mu stack
/// (when the thread exits, we restore to native stack, and allow proper destruction)
pub native_sp_loc: Address,
@@ -350,9 +344,8 @@ impl fmt::Display for MuThread {
).unwrap();
write!(
f,
"- stack @{:?}: {}\n",
&self.stack as *const Option<Box<MuStack>>,
self.stack.is_some()
"- stack @{:?}\n",
&self.stack as *const *mut MuStack
).unwrap();
write!(
f,
@@ -389,9 +382,8 @@ extern "C" {
/// we swap to mu stack and execute the entry function
/// args:
/// new_sp: stack pointer for the mu stack
/// entry : entry function for the mu stack
/// old_sp_loc: the location to store native stack pointer so we can later swap back
fn muthread_start_pass(new_sp: Address, old_sp_loc: Address);
fn muthread_start_normal(new_sp: Address, old_sp_loc: Address);
/// gets base pointer for current frame
pub fn get_current_frame_bp() -> Address;
@@ -484,6 +476,7 @@ impl MuThread {
// set thread local
unsafe { set_thread_local(&mut muthread) };
trace!("new MuThread @{}", Address::from_ref(&mut muthread));
let addr = unsafe { muentry_get_thread_local() };
let sp_threadlocal_loc = addr + *NATIVE_SP_LOC_OFFSET;
@@ -491,7 +484,7 @@
debug!("sp_store: 0x{:x}", sp_threadlocal_loc);
unsafe {