To protect your data, the CISO has suggested that users enable 2FA as soon as possible.
Currently 2.7% of users enabled 2FA.

Commit 3954f23e authored by qinsoon's avatar qinsoon
Browse files

[wip] implemented swapstack for x64, debugging on it

parent 4812aa9a
...@@ -2127,7 +2127,7 @@ impl<'a> InstructionSelection { ...@@ -2127,7 +2127,7 @@ impl<'a> InstructionSelection {
is_exception, is_exception,
ref args ref args
} => { } => {
trace!("Instruction Selection on SWPASTACK-KILL"); trace!("Instruction Selection on SWAPSTACK-KILL");
self.emit_swapstack( self.emit_swapstack(
is_exception, // is_exception is_exception, // is_exception
true, // is_kill true, // is_kill
......
...@@ -1199,7 +1199,8 @@ impl ASMCodeGen { ...@@ -1199,7 +1199,8 @@ impl ASMCodeGen {
&mut self, &mut self,
code: String, code: String,
potentially_excepting: Option<MuName>, potentially_excepting: Option<MuName>,
arguments: Vec<P<Value>>, use_vec: Vec<P<Value>>,
def_vec: Vec<P<Value>>,
target: Option<(MuID, ASMLocation)> target: Option<(MuID, ASMLocation)>
) { ) {
let mut uses: LinkedHashMap<MuID, Vec<ASMLocation>> = LinkedHashMap::new(); let mut uses: LinkedHashMap<MuID, Vec<ASMLocation>> = LinkedHashMap::new();
...@@ -1207,20 +1208,13 @@ impl ASMCodeGen { ...@@ -1207,20 +1208,13 @@ impl ASMCodeGen {
let (id, loc) = target.unwrap(); let (id, loc) = target.unwrap();
uses.insert(id, vec![loc]); uses.insert(id, vec![loc]);
} }
for arg in arguments { for u in use_vec {
uses.insert(arg.id(), vec![]); uses.insert(u.id(), vec![]);
} }
let mut defines: LinkedHashMap<MuID, Vec<ASMLocation>> = LinkedHashMap::new(); let mut defines: LinkedHashMap<MuID, Vec<ASMLocation>> = LinkedHashMap::new();
for reg in x86_64::CALLER_SAVED_GPRS.iter() { for d in def_vec {
if !defines.contains_key(&reg.id()) { defines.insert(d.id(), vec![]);
defines.insert(reg.id(), vec![]);
}
}
for reg in x86_64::CALLER_SAVED_FPRS.iter() {
if !defines.contains_key(&reg.id()) {
defines.insert(reg.id(), vec![]);
}
} }
self.add_asm_inst_internal( self.add_asm_inst_internal(
...@@ -3291,13 +3285,14 @@ impl CodeGenerator for ASMCodeGen { ...@@ -3291,13 +3285,14 @@ impl CodeGenerator for ASMCodeGen {
callsite: String, callsite: String,
func: MuName, func: MuName,
pe: Option<MuName>, pe: Option<MuName>,
args: Vec<P<Value>>, uses: Vec<P<Value>>,
defs: Vec<P<Value>>,
is_native: bool is_native: bool
) -> ValueLocation { ) -> ValueLocation {
if is_native { if is_native {
trace!("emit: call /*C*/ {}({:?})", func, args); trace!("emit: call /*C*/ {}({:?})", func, uses);
} else { } else {
trace!("emit: call {}({:?})", func, args); trace!("emit: call {}({:?})", func, uses);
} }
let func = if is_native { let func = if is_native {
...@@ -3312,7 +3307,7 @@ impl CodeGenerator for ASMCodeGen { ...@@ -3312,7 +3307,7 @@ impl CodeGenerator for ASMCodeGen {
format!("call {}@PLT", func) format!("call {}@PLT", func)
}; };
self.add_asm_call(asm, pe, args, None); self.add_asm_call(asm, pe, uses, defs, None);
self.add_asm_global_label(symbol(mangle_name(callsite.clone()))); self.add_asm_global_label(symbol(mangle_name(callsite.clone())));
ValueLocation::Relocatable(RegGroup::GPR, callsite) ValueLocation::Relocatable(RegGroup::GPR, callsite)
...@@ -3323,14 +3318,15 @@ impl CodeGenerator for ASMCodeGen { ...@@ -3323,14 +3318,15 @@ impl CodeGenerator for ASMCodeGen {
callsite: String, callsite: String,
func: &P<Value>, func: &P<Value>,
pe: Option<MuName>, pe: Option<MuName>,
args: Vec<P<Value>> uses: Vec<P<Value>>,
defs: Vec<P<Value>>
) -> ValueLocation { ) -> ValueLocation {
trace!("emit: call {}", func); trace!("emit: call {}", func);
let (reg, id, loc) = self.prepare_reg(func, 6); let (reg, id, loc) = self.prepare_reg(func, 6);
let asm = format!("call *{}", reg); let asm = format!("call *{}", reg);
// the call uses the register // the call uses the register
self.add_asm_call(asm, pe, args, Some((id, loc))); self.add_asm_call(asm, pe, uses, defs, Some((id, loc)));
self.add_asm_global_label(symbol(mangle_name(callsite.clone()))); self.add_asm_global_label(symbol(mangle_name(callsite.clone())));
ValueLocation::Relocatable(RegGroup::GPR, callsite) ValueLocation::Relocatable(RegGroup::GPR, callsite)
...@@ -3342,7 +3338,8 @@ impl CodeGenerator for ASMCodeGen { ...@@ -3342,7 +3338,8 @@ impl CodeGenerator for ASMCodeGen {
callsite: String, callsite: String,
func: &P<Value>, func: &P<Value>,
pe: Option<MuName>, pe: Option<MuName>,
args: Vec<P<Value>> uses: Vec<P<Value>>,
defs: Vec<P<Value>>
) -> ValueLocation { ) -> ValueLocation {
trace!("emit: call {}", func); trace!("emit: call {}", func);
unimplemented!() unimplemented!()
......
...@@ -19,16 +19,19 @@ pub mod mu { ...@@ -19,16 +19,19 @@ pub mod mu {
pub use super::c::*; pub use super::c::*;
} }
pub mod swapstack {
pub use super::c::compute_arguments;
pub use super::c::compute_stack_args;
pub use super::c::compute_arguments as compute_return_values;
pub use super::c::compute_stack_args as compute_stack_retvals;
}
pub mod c { pub mod c {
use super::*; use super::*;
/// computes arguments for the function signature, /// computes arguments for the function signature,
/// returns a vector of CallConvResult for each argument type /// returns a vector of CallConvResult for each argument type
pub fn compute_arguments(sig: &MuFuncSig) -> Vec<CallConvResult> { pub fn compute_arguments(tys: &Vec<P<MuType>>) -> Vec<CallConvResult> {
compute_arguments_by_type(&sig.arg_tys)
}
pub fn compute_arguments_by_type(tys: &Vec<P<MuType>>) -> Vec<CallConvResult> {
let mut ret = vec![]; let mut ret = vec![];
let mut gpr_arg_count = 0; let mut gpr_arg_count = 0;
...@@ -80,15 +83,30 @@ pub mod c { ...@@ -80,15 +83,30 @@ pub mod c {
ret ret
} }
pub fn compute_stack_args(tys: &Vec<P<MuType>>, vm: &VM) -> (ByteSize, Vec<ByteSize>) {
let callconv = compute_arguments(tys);
let mut stack_arg_tys = vec![];
for i in 0..callconv.len() {
let ref cc = callconv[i];
match cc {
&CallConvResult::STACK => stack_arg_tys.push(tys[i].clone()),
_ => {}
}
}
compute_stack_locations(&stack_arg_tys, vm)
}
/// computes the return values for the function signature, /// computes the return values for the function signature,
/// returns a vector of CallConvResult for each return type /// returns a vector of CallConvResult for each return type
pub fn compute_return_values(sig: &MuFuncSig) -> Vec<CallConvResult> { pub fn compute_return_values(tys: &Vec<P<MuType>>) -> Vec<CallConvResult> {
let mut ret = vec![]; let mut ret = vec![];
let mut gpr_ret_count = 0; let mut gpr_ret_count = 0;
let mut fpr_ret_count = 0; let mut fpr_ret_count = 0;
for ty in sig.ret_tys.iter() { for ty in tys.iter() {
if RegGroup::get_from_ty(ty) == RegGroup::GPR { if RegGroup::get_from_ty(ty) == RegGroup::GPR {
if gpr_ret_count < x86_64::RETURN_GPRS.len() { if gpr_ret_count < x86_64::RETURN_GPRS.len() {
let ret_gpr = { let ret_gpr = {
...@@ -131,29 +149,29 @@ pub mod c { ...@@ -131,29 +149,29 @@ pub mod c {
ret ret
} }
pub fn compute_stack_args(sig: &MuFuncSig, vm: &VM) -> (ByteSize, Vec<ByteSize>) { pub fn compute_stack_retvals(tys: &Vec<P<MuType>>, vm: &VM) -> (ByteSize, Vec<ByteSize>) {
let callconv = compute_arguments(sig); let callconv = compute_return_values(tys);
let mut stack_arg_tys = vec![]; let mut stack_ret_val_tys = vec![];
for i in 0..callconv.len() { for i in 0..callconv.len() {
let ref cc = callconv[i]; let ref cc = callconv[i];
match cc { match cc {
&CallConvResult::STACK => stack_arg_tys.push(sig.arg_tys[i].clone()), &CallConvResult::STACK => stack_ret_val_tys.push(tys[i].clone()),
_ => {} _ => {}
} }
} }
compute_stack_args_by_type(&stack_arg_tys, vm) compute_stack_locations(&stack_ret_val_tys, vm)
} }
/// computes the return area on the stack for the function signature, /// computes the area on the stack for a list of types that need to put on stack,
/// returns a tuple of (size, callcand offset for each stack arguments) /// returns a tuple of (size, offset for each values on stack)
pub fn compute_stack_args_by_type( pub fn compute_stack_locations(
stack_arg_tys: &Vec<P<MuType>>, stack_val_tys: &Vec<P<MuType>>,
vm: &VM vm: &VM
) -> (ByteSize, Vec<ByteSize>) { ) -> (ByteSize, Vec<ByteSize>) {
let (stack_arg_size, _, stack_arg_offsets) = let (stack_arg_size, _, stack_arg_offsets) =
BackendType::sequential_layout(stack_arg_tys, vm); BackendType::sequential_layout(stack_val_tys, vm);
// "The end of the input argument area shall be aligned on a 16 // "The end of the input argument area shall be aligned on a 16
// (32, if __m256 is passed on stack) byte boundary." - x86 ABI // (32, if __m256 is passed on stack) byte boundary." - x86 ABI
......
...@@ -229,7 +229,8 @@ pub trait CodeGenerator { ...@@ -229,7 +229,8 @@ pub trait CodeGenerator {
callsite: String, callsite: String,
func: MuName, func: MuName,
pe: Option<MuName>, pe: Option<MuName>,
args: Vec<P<Value>>, uses: Vec<P<Value>>,
defs: Vec<P<Value>>,
is_native: bool is_native: bool
) -> ValueLocation; ) -> ValueLocation;
fn emit_call_near_r64( fn emit_call_near_r64(
...@@ -237,14 +238,16 @@ pub trait CodeGenerator { ...@@ -237,14 +238,16 @@ pub trait CodeGenerator {
callsite: String, callsite: String,
func: &P<Value>, func: &P<Value>,
pe: Option<MuName>, pe: Option<MuName>,
args: Vec<P<Value>> uses: Vec<P<Value>>,
defs: Vec<P<Value>>
) -> ValueLocation; ) -> ValueLocation;
fn emit_call_near_mem64( fn emit_call_near_mem64(
&mut self, &mut self,
callsite: String, callsite: String,
func: &P<Value>, func: &P<Value>,
pe: Option<MuName>, pe: Option<MuName>,
args: Vec<P<Value>> uses: Vec<P<Value>>,
defs: Vec<P<Value>>
) -> ValueLocation; ) -> ValueLocation;
fn emit_ret(&mut self); fn emit_ret(&mut self);
......
...@@ -501,6 +501,18 @@ lazy_static! { ...@@ -501,6 +501,18 @@ lazy_static! {
ret.extend_from_slice(&ALL_USABLE_FPRS); ret.extend_from_slice(&ALL_USABLE_FPRS);
ret ret
}; };
/// all the caller saved registers
pub static ref ALL_CALLER_SAVED_REGS : Vec<P<Value>> = {
let mut ret = vec![];
for r in CALLER_SAVED_GPRS.iter() {
ret.push(r.clone());
}
for r in CALLER_SAVED_FPRS.iter() {
ret.push(r.clone());
}
ret
};
} }
/// creates context for each machine register in FunctionContext /// creates context for each machine register in FunctionContext
......
...@@ -204,6 +204,18 @@ lazy_static! { ...@@ -204,6 +204,18 @@ lazy_static! {
jit: RwLock::new(None), jit: RwLock::new(None),
}; };
pub static ref THROW_EXCEPTION_INTERNAL: RuntimeEntrypoint = RuntimeEntrypoint {
sig: P(MuFuncSig {
hdr: MuEntityHeader::unnamed(ir::new_internal_id()),
ret_tys: vec![],
arg_tys: vec![ADDRESS_TYPE.clone(), ADDRESS_TYPE.clone()]
}),
aot: ValueLocation::Relocatable(RegGroup::GPR, String::from("throw_exception_internal")),
jit: RwLock::new(None)
};
}
lazy_static!{
// impl/decl: math.rs // impl/decl: math.rs
pub static ref FREM32 : RuntimeEntrypoint = RuntimeEntrypoint { pub static ref FREM32 : RuntimeEntrypoint = RuntimeEntrypoint {
sig: P(MuFuncSig{ sig: P(MuFuncSig{
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment