
Commit 856903b6 authored by Isaac Oscar Gariano

Improved swapstack implementation (WIP)

parent c24b8896
@@ -176,8 +176,7 @@ impl Instruction {
             CommonInst_GetAddr(_) |
             PrintHex(_) |
             SetRetval(_) |
-            KillStack(_) |
-            CurrentStack => true,
+            KillStack(_) => true,
             BinOp(_, _, _) |
             BinOpWithStatus(_, _, _, _) |
             CmpOp(_, _, _) |
@@ -198,7 +197,8 @@ impl Instruction {
             CommonInst_Tr64ToInt(_) |
             CommonInst_Tr64ToRef(_) |
             CommonInst_Tr64ToTag(_) |
-            Move(_) => false
+            Move(_) |
+            CurrentStack => false
         }
     }
...
@@ -1037,52 +1037,6 @@ impl ASMCodeGen {
         self.cur_mut().code.push(ASMInst::symbolic(code));
     }

-    fn add_asm_call(
-        &mut self,
-        code: String,
-        potentially_excepting: Option<MuName>,
-        arguments: Vec<P<Value>>,
-        target: Option<(MuID, ASMLocation)>
-    ) {
-        // a call instruction will use all the argument registers
-        // do not need
-        let mut uses: LinkedHashMap<MuID, Vec<ASMLocation>> = LinkedHashMap::new();
-        if target.is_some() {
-            let (id, loc) = target.unwrap();
-            uses.insert(id, vec![loc]);
-        }
-        for arg in arguments {
-            uses.insert(arg.id(), vec![]);
-        }
-        let mut defines: LinkedHashMap<MuID, Vec<ASMLocation>> = LinkedHashMap::new();
-        for reg in CALLER_SAVED_GPRS.iter() {
-            if !defines.contains_key(&reg.id()) {
-                defines.insert(reg.id(), vec![]);
-            }
-        }
-        for reg in CALLER_SAVED_FPRS.iter() {
-            if !defines.contains_key(&reg.id()) {
-                defines.insert(reg.id(), vec![]);
-            }
-        }
-        self.add_asm_inst_internal(
-            code,
-            defines,
-            uses,
-            false,
-            {
-                if potentially_excepting.is_some() {
-                    ASMBranchTarget::PotentiallyExcepting(potentially_excepting.unwrap())
-                } else {
-                    ASMBranchTarget::None
-                }
-            },
-            None
-        )
-    }

     fn add_asm_inst(
         &mut self,
         code: String,
@@ -2162,6 +2116,48 @@ impl ASMCodeGen {
         self.add_asm_inst(asm, ignore_zero_register(id1, vec![loc1]), uses, false)
     }

+    fn internal_call(&mut self, callsite: Option<String>, code: String, pe: Option<MuName>, args: Vec<P<Value>>, ret: Vec<P<Value>>, target: Option<(MuID, ASMLocation)>, may_return: bool) -> Option<ValueLocation> {
+        let mut uses: LinkedHashMap<MuID, Vec<ASMLocation>> = LinkedHashMap::new();
+        if target.is_some() {
+            let (id, loc) = target.unwrap();
+            uses.insert(id, vec![loc]);
+        }
+        for arg in args {
+            uses.insert(arg.id(), vec![]);
+        }
+        let mut defines: LinkedHashMap<MuID, Vec<ASMLocation>> = LinkedHashMap::new();
+        for ret in ret.iter() {
+            defines.insert(ret.id(), vec![]);
+        }
+        self.add_asm_inst_internal(
+            code,
+            defines,
+            uses,
+            false,
+            {
+                if pe.is_some() {
+                    ASMBranchTarget::PotentiallyExcepting(pe.unwrap())
+                } else if may_return {
+                    ASMBranchTarget::None
+                } else {
+                    ASMBranchTarget::Return
+                }
+            },
+            None
+        );
+        if callsite.is_some() {
+            let callsite_symbol = mangle_name(callsite.as_ref().unwrap().clone());
+            self.add_asm_symbolic(directive_globl(callsite_symbol.clone()));
+            self.add_asm_symbolic(format!("{}:", callsite_symbol.clone()));
+            Some(ValueLocation::Relocatable(RegGroup::GPR, callsite.unwrap()))
+        } else {
+            None
+        }
+    }

     fn emit_ldr_spill(&mut self, dest: Reg, src: Mem) {
         self.internal_load("LDR", dest, src, false, true, false);
     }
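
For readers skimming the diff: the new internal_call helper records an explicit def/use set for each call, instead of unconditionally marking every caller-saved register as defined the way the removed add_asm_call did. Below is a minimal, self-contained sketch of that bookkeeping; it uses a plain HashMap and an invented RegId alias rather than Zebu's LinkedHashMap/MuID types, so it only illustrates the idea and is not the backend's actual code.

use std::collections::HashMap;

type RegId = u32; // stand-in for MuID, purely illustrative

// Argument registers become uses; the explicit return/clobber list becomes
// defines (callers append LR to that list, since BL/BLR overwrite the link
// register). Location lists are left empty, as in the diff above.
fn call_def_use(
    args: &[RegId],
    rets: &[RegId]
) -> (HashMap<RegId, Vec<usize>>, HashMap<RegId, Vec<usize>>) {
    let mut uses: HashMap<RegId, Vec<usize>> = HashMap::new();
    for &a in args {
        uses.entry(a).or_insert_with(Vec::new);
    }
    let mut defines: HashMap<RegId, Vec<usize>> = HashMap::new();
    for &r in rets {
        defines.entry(r).or_insert_with(Vec::new);
    }
    (defines, uses)
}

fn main() {
    // e.g. a call taking x0 and x1, returning in x0, clobbering x30 (LR)
    let (defines, uses) = call_def_use(&[0, 1], &[0, 30]);
    assert!(defines.contains_key(&30) && uses.contains_key(&1));
}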
@@ -2395,12 +2391,13 @@ impl CodeGenerator for ASMCodeGen {
     fn emit_bl(
         &mut self,
-        callsite: String,
+        callsite: Option<String>,
         func: MuName,
         pe: Option<MuName>,
         args: Vec<P<Value>>,
+        ret: Vec<P<Value>>,
         is_native: bool
-    ) -> ValueLocation {
+    ) -> Option<ValueLocation> {
         if is_native {
             trace_emit!("\tBL /*C*/ {}({:?})", func, args);
         } else {
@@ -2413,34 +2410,27 @@ impl CodeGenerator for ASMCodeGen {
             mangle_name(func)
         };

+        let mut ret = ret;
+        ret.push(LR.clone());
         let asm = format!("BL {}", func);
-        self.add_asm_call(asm, pe, args, None);
-        let callsite_symbol = mangle_name(callsite.clone());
-        self.add_asm_symbolic(directive_globl(callsite_symbol.clone()));
-        self.add_asm_symbolic(format!("{}:", callsite_symbol.clone()));
-        ValueLocation::Relocatable(RegGroup::GPR, callsite)
+        self.internal_call(callsite, asm, pe, args, ret, None, true)
     }

     fn emit_blr(
         &mut self,
-        callsite: String,
+        callsite: Option<String>,
         func: Reg,
         pe: Option<MuName>,
-        args: Vec<P<Value>>
-    ) -> ValueLocation {
+        args: Vec<P<Value>>,
+        ret: Vec<P<Value>>
+    ) -> Option<ValueLocation> {
         trace_emit!("\tBLR {}({:?})", func, args);

+        let mut ret = ret;
+        ret.push(LR.clone());
         let (reg1, id1, loc1) = self.prepare_reg(func, 3 + 1);
         let asm = format!("BLR {}", reg1);
-        self.add_asm_call(asm, pe, args, Some((id1, loc1)));
-        let callsite_symbol = mangle_name(callsite.clone());
-        self.add_asm_symbolic(directive_globl(callsite_symbol.clone()));
-        self.add_asm_symbolic(format!("{}:", callsite_symbol.clone()));
-        ValueLocation::Relocatable(RegGroup::GPR, callsite)
+        self.internal_call(callsite, asm, pe, args, ret, Some((id1, loc1)), true)
     }
@@ -2458,23 +2448,35 @@ impl CodeGenerator for ASMCodeGen {
             None
         );
     }

-    fn emit_b_func(&mut self, func_name: MuName, args: Vec<P<Value>>) {
-        trace_emit!("\tB {}({:?})", func_name, args);
-        let asm = format!("/*TAILCALL*/ B {}", mangle_name(func_name.clone()));
-        let mut uses: LinkedHashMap<MuID, Vec<ASMLocation>> = LinkedHashMap::new();
-        for arg in args {
-            uses.insert(arg.id(), vec![]);
-        }
-        self.add_asm_inst_internal(
-            asm,
-            linked_hashmap!{},
-            uses,
-            false,
-            ASMBranchTarget::Return,
-            None
-        );
+    fn emit_b_call(
+        &mut self,
+        callsite: Option<String>,
+        func: MuName,
+        pe: Option<MuName>,
+        args: Vec<P<Value>>,
+        ret: Vec<P<Value>>,
+        is_native: bool,
+        may_return: bool
+    ) -> Option<ValueLocation> {
+        if is_native {
+            trace_emit!("\tB /*C*/ {}({:?})", func, args);
+        } else {
+            trace_emit!("\tB {}({:?})", func, args);
+        }
+        let func = if is_native {
+            "/*C*/".to_string() + func.as_str()
+        } else {
+            mangle_name(func)
+        };
+        let mut ret = ret;
+        ret.push(LR.clone());
+        let asm = format!("B {}", func);
+        self.internal_call(callsite, asm, pe, args, ret, None, may_return)
     }

     fn emit_b_cond(&mut self, cond: &str, dest_name: MuName) {
         trace_emit!("\tB.{} {}", cond, dest_name);
@@ -2503,35 +2505,25 @@ impl CodeGenerator for ASMCodeGen {
             None
         );
     }

-    fn emit_br_func(&mut self, func_address: Reg, args: Vec<P<Value>>) {
-        trace_emit!("\tBR {}({:?})", func_address, args);
-        let (reg1, id1, loc1) = self.prepare_reg(func_address, 2 + 1);
-        let asm = format!("/*TAILCALL*/ BR {}", reg1);
-        let mut added_id1 = false;
-        let mut uses: LinkedHashMap<MuID, Vec<ASMLocation>> = LinkedHashMap::new();
-        for arg in args {
-            if arg.id() == id1 {
-                uses.insert(arg.id(), vec![loc1.clone()]);
-                added_id1 = true;
-            } else {
-                uses.insert(arg.id(), vec![]);
-            }
-        }
-        if !added_id1 {
-            uses.insert(id1, vec![loc1]);
-        }
-        self.add_asm_inst_internal(
-            asm,
-            linked_hashmap!{},
-            uses,
-            false,
-            ASMBranchTarget::Return,
-            None
-        );
+    fn emit_br_call(
+        &mut self,
+        callsite: Option<String>,
+        func: Reg,
+        pe: Option<MuName>,
+        args: Vec<P<Value>>,
+        ret: Vec<P<Value>>,
+        may_return: bool
+    ) -> Option<ValueLocation> {
+        trace_emit!("\tBR {}({:?})", func, args);
+        let mut ret = ret;
+        ret.push(LR.clone());
+        let (reg1, id1, loc1) = self.prepare_reg(func, 3 + 1);
+        let asm = format!("BR {}", reg1);
+        self.internal_call(callsite, asm, pe, args, ret, Some((id1, loc1)), may_return)
     }

     fn emit_cbnz(&mut self, src: Reg, dest_name: MuName) {
         self.internal_branch_op("CBNZ", src, dest_name);
     }
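
The may_return flag threaded through emit_b_call and emit_br_call drives the branch-target selection inside internal_call: a call with an exception clause is recorded as potentially excepting, an ordinary call falls through to the next instruction, and a tail call that never resumes in this frame is recorded as a return. A self-contained sketch of that decision follows; the enum is re-declared here only for illustration and merely mirrors ASMBranchTarget in spirit.

#[derive(Debug, PartialEq)]
enum BranchTarget {
    None,                         // ordinary call: control falls through afterwards
    PotentiallyExcepting(String), // call with an exception (catch) destination
    Return,                       // tail call: this frame never resumes after the branch
}

fn classify(pe: Option<String>, may_return: bool) -> BranchTarget {
    match pe {
        Some(label) => BranchTarget::PotentiallyExcepting(label),
        None if may_return => BranchTarget::None,
        None => BranchTarget::Return,
    }
}

fn main() {
    assert_eq!(classify(None, true), BranchTarget::None);    // BL/BLR-style call
    assert_eq!(classify(None, false), BranchTarget::Return); // B/BR tail call
    assert_eq!(
        classify(Some("catch_block".to_string()), true),
        BranchTarget::PotentiallyExcepting("catch_block".to_string())
    );
}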
...
@@ -120,26 +120,45 @@ pub trait CodeGenerator {
     // Calls
     fn emit_bl(
         &mut self,
-        callsite: String,
+        callsite: Option<String>,
         func: MuName,
         pe: Option<MuName>,
         args: Vec<P<Value>>,
+        ret: Vec<P<Value>>,
         is_native: bool
-    ) -> ValueLocation;
+    ) -> Option<ValueLocation>;
     fn emit_blr(
         &mut self,
-        callsite: String,
+        callsite: Option<String>,
         func: Reg,
         pe: Option<MuName>,
-        args: Vec<P<Value>>
-    ) -> ValueLocation;
+        args: Vec<P<Value>>,
+        ret: Vec<P<Value>>,
+    ) -> Option<ValueLocation>;

     // Branches
     fn emit_b(&mut self, dest_name: MuName);
-    fn emit_b_func(&mut self, func: MuName, args: Vec<P<Value>>); // For tail calls
     fn emit_b_cond(&mut self, cond: &str, dest_name: MuName);
     fn emit_br(&mut self, dest_address: Reg);
-    fn emit_br_func(&mut self, func_address: Reg, args: Vec<P<Value>>); // For tail calls
+    fn emit_b_call(
+        &mut self,
+        callsite: Option<String>,
+        func: MuName,
+        pe: Option<MuName>,
+        args: Vec<P<Value>>,
+        ret: Vec<P<Value>>,
+        is_native: bool,
+        may_return: bool
+    ) -> Option<ValueLocation>;
+    fn emit_br_call(
+        &mut self,
+        callsite: Option<String>,
+        func: Reg,
+        pe: Option<MuName>,
+        args: Vec<P<Value>>,
+        ret: Vec<P<Value>>,
+        may_return: bool
+    ) -> Option<ValueLocation>;
     fn emit_ret(&mut self, src: Reg);
     fn emit_cbnz(&mut self, src: Reg, dest_name: MuName);
...
@@ -23,6 +23,7 @@ use ast::op::*;
 use ast::types::*;
 use utils::math::align_up;
 use utils::POINTER_SIZE;
+use utils::WORD_SIZE;
 use vm::VM;
 use runtime::mm;
 use runtime::mm::OBJECT_HEADER_SIZE;
@@ -1478,7 +1479,6 @@ impl<'a> InstructionSelection {
         let tmp_res = self.get_result_value(node, 0);

         // load [tl + STACK_OFFSET] -> tmp_res
-        // WARNING: This assumes that an Option<Box<MuStack>> is actually just a pointer to a MuStack
         emit_load_base_offset(
             self.backend.as_mut(),
             &tmp_res,
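
The warning comment removed above relied on Option<Box<MuStack>> having the same representation as a raw pointer to a MuStack. That layout is in fact guaranteed by Rust's niche (null-pointer) optimisation for Box, which may be why the comment was considered redundant; here is a standalone check of the assumption, using an invented placeholder struct rather than the real MuStack.

use std::mem::size_of;

struct MuStackModel {
    _sp: usize // placeholder; the real MuStack carries much more state
}

fn main() {
    // Box<T> is non-null, so Option<Box<T>> can use the null value for None and
    // stays pointer-sized: loading it as a plain word is sound.
    assert_eq!(
        size_of::<Option<Box<MuStackModel>>>(),
        size_of::<*mut MuStackModel>()
    );
}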
@@ -4216,15 +4216,8 @@
             if vm.is_doing_jit() {
                 unimplemented!()
             } else {
-                let callsite = self.new_callsite_label(cur_node);
-                self.backend
-                    .emit_bl(callsite.clone(), func_name, None, arg_regs, true);
                 // assume ccall wont throw exception
+                self.backend.emit_bl(None, func_name, None, arg_regs, CALLER_SAVED_REGS.to_vec(), true);
+                // TODO: What if theres an exception block?
-                self.current_callsites
-                    .push_back((callsite, 0, stack_arg_size));

                 // record exception block (CCall may have an exception block)
                 if cur_node.is_some() {
@@ -4336,80 +4329,130 @@
         vm: &VM
     ) {
         let ref ops = inst.ops;

-        let swapee = self.emit_ireg(&ops[swapee], f_content, f_context, vm);
-        let new_sp = make_temporary(f_context, ADDRESS_TYPE.clone(), vm);
-        let cur_stack = make_temporary(
-            f_context,
-            if is_kill {
-                STACKREF_TYPE.clone()
-            } else {
-                ADDRESS_TYPE.clone()
-            },
-            vm
-        );
-        // Prepare for stack swapping
-        self.emit_runtime_entry(
-            if is_kill {
-                &entrypoints::PREPARE_SWAPSTACK_KILL
-            } else {
-                &entrypoints::PREPARE_SWAPSTACK_RET
-            },
-            vec![swapee.clone()],
-            Some(vec![new_sp.clone(), cur_stack.clone()]),
-            Some(node),
-            f_context,
-            vm
-        );
+        // Calsite label, that will be used to mark the resumption pointer when
+        // the current stack is swapped back
+        let callsite_label = if !is_kill {
+            Some(self.new_callsite_label(Some(node)))
+        } else {
+            None
+        };
+
+        // Compute all the arguments...
+        let mut arg_values = vec![];
+        let arg_nodes = args.iter().map(|a| ops[*a].clone()).collect::<Vec<_>>();
+        for ref arg in &arg_nodes {
+            if match_node_imm(arg) {
+                arg_values.push(node_imm_to_value(arg))
+            } else if self.match_reg(arg) {
+                arg_values.push(self.emit_reg(arg, f_content, f_context, vm))
+            } else {
+                unimplemented!()
+            };
+        }
+
+        let tl = self.emit_get_threadlocal(f_context, vm);
+        let cur_stackref = make_temporary(f_context, STACKREF_TYPE.clone(), vm);
+        // Load the current stackref
+        emit_load_base_offset(
+            self.backend.as_mut(),
+            &cur_stackref,
+            &tl,
+            *thread::STACK_OFFSET as i64,
+            f_context,
+            vm
+        );
+
+        // Store the new stackref
+        let swapee = self.emit_ireg(&ops[swapee], f_content, f_context, vm);
+        emit_store_base_offset(
+            self.backend.as_mut(),
+            &tl,
+            *thread::STACK_OFFSET as i64,
+            &swapee,
+            f_context,
+            vm
+        );
+
+        // Compute the locations of return values, and how much space needs to be added to the stack
         let res_tys = match inst.value {
             Some(ref values) => values.iter().map(|v| v.ty.clone()).collect::<Vec<_>>(),
             None => vec![]
         };
         let (_, res_locs, res_stack_size) = compute_argument_locations(&res_tys, &SP, 0, &vm);

         if !is_kill {
+            // Load the callsite's address into LR
+            let callsite_value = make_value_symbolic(callsite_label.as_ref().unwrap().clone(),
+                false, &VOID_TYPE, vm);
+            self.backend.emit_adr(&LR, &callsite_value);
+
             // Reserve space on the stack for the return values of the swap stack
             emit_sub_u64(self.backend.as_mut(), &SP, &SP, res_stack_size as u64);
+
+            self.backend.emit_push_pair(&LR, &FP, &SP);
+
+            let cur_sp = make_temporary(f_context, STACKREF_TYPE.clone(), vm);
+            self.backend.emit_mov(&cur_sp, &SP);
+            // Save the current SP
+            emit_store_base_offset(
+                self.backend.as_mut(),
+                &cur_stackref,
+                *thread::MUSTACK_SP_OFFSET as i64,
+                &cur_sp,
+                f_context,
+                vm
+            );
         }

-        if is_exception {
-            assert!(args.len() == 1);
+        // Load the new sp from the swapee
+        // (Note: we cant load directly into the SP, so we have to use a temporary)
+        let new_sp = make_temporary(f_context, ADDRESS_TYPE.clone(), vm);
+        emit_load_base_offset(
+            self.backend.as_mut(),
+            &new_sp,
+            &swapee,
+            *thread::MUSTACK_SP_OFFSET as i64,
+            f_context,
+            vm
+        );
+
+        // Swap to the new stack
+        self.backend.emit_mov(&SP, &new_sp);
+
+        // TODO: MAKE SURE THE REGISTER ALLOCATOR DOSN'T DO SPILLING AFTER THIS POINT
+
+        if is_kill {
+            // Kill the old stack
+            self.emit_runtime_entry(
+                &entrypoints::KILL_STACK,
+                vec![cur_stackref],
+                None,
+                Some(node),