WARNING! Access to this system is limited to authorised users only.
Unauthorised users may be subject to prosecution.
Unauthorised access to this system is a criminal offence under Australian law (Crimes Act 1914 (Cth), Part VIA)
It is a criminal offence to:
(1) Obtain access to data without authority. -Penalty 2 years imprisonment.
(2) Damage, delete, alter or insert data without authority. -Penalty 10 years imprisonment.
User activity is monitored and recorded. Anyone using this system expressly consents to such monitoring and recording.

To protect your data, the CISO has advised all users to enable 2FA as soon as possible.
Currently 2.2% of users enabled 2FA.

Commit 214d799f authored by Kunshan Wang
Browse files

Merge remote-tracking branch 'origin/master'

parents e39e0e4d 26dde85c
......@@ -88,7 +88,7 @@ pub enum Instruction_ {
// yields a tuple of results from the call
ExprCall{
data: CallData,
is_abort: bool, // T to abort, F to rethrow
is_abort: bool, // T to abort, F to rethrow - FIXME: currently we always rethrow
},
// yields the memory value
......@@ -221,6 +221,10 @@ pub enum Instruction_ {
data: CallData,
resume: ResumptionData
},
CCall{
data: CallData,
resume: ResumptionData
},
SwapStack{
stack: OpIndex,
is_exception: bool,
......@@ -318,6 +322,7 @@ impl Instruction_ {
format!("WPBRANCH {} {} {}", wp, disable_dest.debug_str(ops), enable_dest.debug_str(ops))
},
&Instruction_::Call{ref data, ref resume} => format!("CALL {} {}", data.debug_str(ops), resume.debug_str(ops)),
// FIX: this arm previously printed "CALL", which made a CCALL instruction
// indistinguishable from a CALL (line above) in debug output.
&Instruction_::CCall{ref data, ref resume} => format!("CCALL {} {}", data.debug_str(ops), resume.debug_str(ops)),
&Instruction_::SwapStack{stack, is_exception, ref args, ref resume} => {
format!("SWAPSTACK {} {} {} {}", ops[stack], is_exception, op_vector_str(args, ops), resume.debug_str(ops))
},
......@@ -396,7 +401,7 @@ pub struct Destination {
impl Destination {
fn debug_str(&self, ops: &Vec<P<TreeNode>>) -> String {
let mut ret = format!("{}", self.target);
let mut ret = format!("{} with ", self.target);
ret.push('[');
for i in 0..self.args.len() {
let ref arg = self.args[i];
......
......@@ -287,6 +287,10 @@ impl Block {
/// Creates an empty, unnamed block identified by `id`.
/// The new block has no content and default control-flow information;
/// callers are expected to fill in `content` later.
pub fn new(id: MuID) -> Block {
    Block {
        hdr: MuEntityHeader::unnamed(id),
        content: None,
        control_flow: ControlFlow::default(),
    }
}
/// Returns true if this block is an exception (landing-pad) block,
/// i.e. its content declares an exception argument (`exn_arg`).
///
/// # Panics
/// Panics if the block's `content` has not been set yet.
pub fn is_exception_block(&self) -> bool {
    // idiomatic: the final expression is the return value (no `return` needed)
    self.content.as_ref().unwrap().exn_arg.is_some()
}
}
#[derive(Debug, RustcEncodable, RustcDecodable)]
......
......@@ -33,6 +33,7 @@ pub fn is_terminal_inst(inst: &Instruction_) -> bool {
| &Watchpoint{..}
| &WPBranch{..}
| &Call{..}
| &CCall{..}
| &SwapStack{..}
| &Switch{..}
| &ExnInstruction{..} => true
......@@ -76,6 +77,7 @@ pub fn has_side_effect(inst: &Instruction_) -> bool {
&Watchpoint{..} => true,
&WPBranch{..} => true,
&Call{..} => true,
&CCall{..} => true,
&SwapStack{..} => true,
&Switch{..} => true,
&ExnInstruction{..} => true
......
......@@ -27,6 +27,7 @@ pub enum OpCode {
Watchpoint,
WPBranch,
Call,
CCall,
SwapStack,
Switch,
ExnInstruction,
......@@ -233,6 +234,7 @@ pub fn pick_op_code_for_inst(inst: &Instruction) -> OpCode {
Instruction_::Watchpoint{..} => OpCode::Watchpoint,
Instruction_::WPBranch{..} => OpCode::WPBranch,
Instruction_::Call{..} => OpCode::Call,
Instruction_::CCall{..} => OpCode::CCall,
Instruction_::SwapStack{..} => OpCode::SwapStack,
Instruction_::Switch{..} => OpCode::Switch,
Instruction_::ExnInstruction{..} => OpCode::ExnInstruction
......
......@@ -670,7 +670,8 @@ impl CodeGenerator for ASMCodeGen {
symbol.push_str("_end");
symbol
};
self.add_asm_symbolic(directive_globl(func_end_symbol.clone()));
self.add_asm_symbolic(directive_globl(func_end_symbol.clone()));
self.add_asm_symbolic(format!("{}:", func_end_symbol.clone()));
self.control_flow_analysis();
......@@ -708,6 +709,16 @@ impl CodeGenerator for ASMCodeGen {
self.cur_mut().block_start.insert(block_name, start);
}
/// Starts an exception (landing-pad) block in the generated assembly.
///
/// Unlike `start_block`, this also emits a *global* label for the block, so
/// the block's address can be resolved by symbol at runtime; that symbol is
/// returned as a relocatable `ValueLocation`.
fn start_exception_block(&mut self, block_name: MuName) -> ValueLocation {
    // mangle the block label into an assembler symbol
    let block_symbol = symbol(self.asm_block_label(block_name.clone()));
    // make the label globally visible and emit it at the current position
    self.add_asm_symbolic(directive_globl(block_symbol.clone()));
    self.add_asm_symbolic(format!("{}:", block_symbol.clone()));
    // then perform the normal block bookkeeping
    self.start_block(block_name);
    ValueLocation::Relocatable(RegGroup::GPR, block_symbol)
}
fn end_block(&mut self, block_name: MuName) {
let start : usize = *self.cur().block_start.get(&block_name).unwrap();
let end : usize = self.line();
......@@ -1141,21 +1152,25 @@ impl CodeGenerator for ASMCodeGen {
self.add_asm_branch2(asm, dest_name);
}
fn emit_call_near_rel32(&mut self, func: MuName) {
/// Emits a near `call` with a rel32 target, labelling the callsite.
///
/// A global symbol named after `callsite` is emitted immediately before the
/// `call` instruction, and returned as a relocatable `ValueLocation`, so the
/// callsite's address can later be resolved by symbol.
fn emit_call_near_rel32(&mut self, callsite: String, func: MuName) -> ValueLocation {
    trace!("emit: call {}", func);
    // label this callsite with a globally visible symbol
    let callsite_symbol = symbol(callsite);
    self.add_asm_symbolic(directive_globl(callsite_symbol.clone()));
    self.add_asm_symbolic(format!("{}:", callsite_symbol.clone()));
    let asm = format!("call {}", symbol(func));
    self.add_asm_call(asm);
    // FIXME: call interferes with machine registers
    ValueLocation::Relocatable(RegGroup::GPR, callsite_symbol)
}
fn emit_call_near_r64(&mut self, func: &P<Value>) {
/// Emits a near `call` through a 64-bit register.
/// TODO: stub — traces the request and then panics via `unimplemented!()`.
/// `callsite` is accepted for interface parity with `emit_call_near_rel32`.
fn emit_call_near_r64(&mut self, callsite: String, func: &P<Value>) -> ValueLocation {
    trace!("emit: call {}", func);
    unimplemented!()
}
fn emit_call_near_mem64(&mut self, func: &P<Value>) {
/// Emits a near `call` through a 64-bit memory operand.
/// TODO: stub — traces the request and then panics via `unimplemented!()`.
/// `callsite` is accepted for interface parity with `emit_call_near_rel32`.
fn emit_call_near_mem64(&mut self, callsite: String, func: &P<Value>) -> ValueLocation {
    trace!("emit: call {}", func);
    unimplemented!()
}
......
......@@ -11,6 +11,7 @@ pub trait CodeGenerator {
fn print_cur_code(&self);
fn start_block(&mut self, block_name: MuName);
fn start_exception_block(&mut self, block_name: MuName) -> ValueLocation;
fn set_block_livein(&mut self, block_name: MuName, live_in: &Vec<P<Value>>);
fn set_block_liveout(&mut self, block_name: MuName, live_out: &Vec<P<Value>>);
fn end_block(&mut self, block_name: MuName);
......@@ -53,9 +54,9 @@ pub trait CodeGenerator {
fn emit_jl(&mut self, dest: MuName);
fn emit_jle(&mut self, dest: MuName);
fn emit_call_near_rel32(&mut self, func: MuName);
fn emit_call_near_r64(&mut self, func: &P<Value>);
fn emit_call_near_mem64(&mut self, func: &P<Value>);
fn emit_call_near_rel32(&mut self, callsite: String, func: MuName) -> ValueLocation;
fn emit_call_near_r64(&mut self, callsite: String, func: &P<Value>) -> ValueLocation;
fn emit_call_near_mem64(&mut self, callsite: String, func: &P<Value>) -> ValueLocation;
fn emit_ret(&mut self);
......
use ast::ir::*;
use ast::ptr::*;
use ast::types::*;
use runtime::ValueLocation;
use std::collections::HashMap;
use utils::POINTER_SIZE;
use vm::VM;
type SlotID = usize;
// | previous frame ...
// |---------------
// | return address
......@@ -21,18 +20,21 @@ use rustc_serialize::{Encodable, Encoder, Decodable, Decoder};
#[derive(RustcEncodable, RustcDecodable)]
pub struct Frame {
cur_slot_id: SlotID,
func_ver_id: MuID,
cur_offset: isize, // offset to rbp
allocated: HashMap<SlotID, FrameSlot>,
pub allocated: HashMap<MuID, FrameSlot>,
// key: callsite, val: destination address
pub exception_callsites: HashMap<ValueLocation, ValueLocation>
}
impl Frame {
pub fn new() -> Frame {
pub fn new(func_ver_id: MuID) -> Frame {
Frame {
cur_slot_id: 0,
func_ver_id: func_ver_id,
cur_offset: - (POINTER_SIZE as isize * 1), // reserve for old RBP
allocated: HashMap::new()
allocated: HashMap::new(),
exception_callsites: HashMap::new()
}
}
......@@ -46,29 +48,28 @@ impl Frame {
slot.make_memory_op(reg.ty.clone(), vm)
}
/// Records that `callsite` has `dest` as its exception-handling destination.
/// The `exception_callsites` map is consulted during stack unwinding to find
/// the catch block for a given callsite (see `muentry_throw_exception`).
pub fn add_exception_callsite(&mut self, callsite: ValueLocation, dest: ValueLocation) {
    self.exception_callsites.insert(callsite, dest);
}
fn alloc_slot(&mut self, val: &P<Value>, vm: &VM) -> &FrameSlot {
let id = self.cur_slot_id;
let id = val.id();
let ret = FrameSlot {
id: id,
offset: self.cur_offset,
value: val.clone()
};
self.cur_slot_id += 1;
self.cur_offset -= vm.get_type_size(val.ty.id()) as isize;
self.allocated.insert(id, ret);
self.allocated.get(&id).unwrap()
}
}
#[derive(RustcEncodable, RustcDecodable)]
struct FrameSlot {
id: SlotID,
offset: isize,
value: P<Value>
pub struct FrameSlot {
pub offset: isize,
pub value: P<Value>
}
impl FrameSlot {
......
......@@ -132,7 +132,7 @@ pub extern fn alloc(mutator: *mut ImmixMutatorLocal, size: usize, align: usize)
#[no_mangle]
#[inline(never)]
pub extern fn alloc_slow(mutator: *mut ImmixMutatorLocal, size: usize, align: usize) -> ObjectReference {
pub extern fn muentry_alloc_slow(mutator: *mut ImmixMutatorLocal, size: usize, align: usize) -> ObjectReference {
let ret = unsafe {mutator.as_mut().unwrap()}.try_alloc_from_local(size, align);
unsafe {ret.to_object_reference()}
}
......
......@@ -18,33 +18,49 @@ pub struct RuntimeEntrypoint {
}
lazy_static! {
// impl: runtime_x64_macos.c
// decl: thread.rs
pub static ref GET_THREAD_LOCAL : RuntimeEntrypoint = RuntimeEntrypoint {
sig: P(MuFuncSig {
hdr: MuEntityHeader::unnamed(ir::new_internal_id()),
ret_tys: vec![ADDRESS_TYPE.clone()],
arg_tys: vec![]
}),
aot: ValueLocation::Relocatable(RegGroup::GPR, String::from("get_thread_local")),
aot: ValueLocation::Relocatable(RegGroup::GPR, String::from("muentry_get_thread_local")),
jit: RwLock::new(None),
};
// impl: swap_stack_x64_macos.s
// decl: thread.rs
pub static ref SWAP_BACK_TO_NATIVE_STACK : RuntimeEntrypoint = RuntimeEntrypoint {
sig: P(MuFuncSig{
hdr: MuEntityHeader::unnamed(ir::new_internal_id()),
ret_tys: vec![],
arg_tys: vec![ADDRESS_TYPE.clone()]
}),
aot: ValueLocation::Relocatable(RegGroup::GPR, String::from("swap_back_to_native_stack")),
aot: ValueLocation::Relocatable(RegGroup::GPR, String::from("muentry_swap_back_to_native_stack")),
jit: RwLock::new(None),
};
// impl/decl: gc/lib.rs
pub static ref ALLOC_SLOW : RuntimeEntrypoint = RuntimeEntrypoint {
sig: P(MuFuncSig {
hdr: MuEntityHeader::unnamed(ir::new_internal_id()),
ret_tys: vec![ADDRESS_TYPE.clone()],
arg_tys: vec![UINT64_TYPE.clone(), UINT64_TYPE.clone()]
}),
aot: ValueLocation::Relocatable(RegGroup::GPR, String::from("alloc_slow")),
aot: ValueLocation::Relocatable(RegGroup::GPR, String::from("muentry_alloc_slow")),
jit: RwLock::new(None),
};
// impl/decl: exception.rs
pub static ref THROW_EXCEPTION : RuntimeEntrypoint = RuntimeEntrypoint {
sig: P(MuFuncSig {
hdr: MuEntityHeader::unnamed(ir::new_internal_id()),
ret_tys: vec![],
arg_tys: vec![ADDRESS_TYPE.clone()]
}),
aot: ValueLocation::Relocatable(RegGroup::GPR, String::from("muentry_throw_exception")),
jit: RwLock::new(None),
};
}
\ No newline at end of file
use utils::Address;
use runtime::thread;
/// Records `exception_obj` as the pending exception on the current Mu thread.
///
/// NOTE(review): this only stores the object on the thread; it performs no
/// stack unwinding itself.
#[no_mangle]
#[cfg(target_arch = "x86_64")]
pub extern fn mu_throw_exception(exception_obj: Address) {
    trace!("throwing exception: {}", exception_obj);
    let current_thread = thread::MuThread::current_mut();
    current_thread.exception_obj = exception_obj;
}
\ No newline at end of file
use ast::ir::*;
use compiler::machine_code::CompiledFunction;
use compiler::frame::*;
use compiler::backend::x86_64;
use utils::Address;
use utils::POINTER_SIZE;
use runtime::thread;
use std::sync::RwLock;
use std::sync::RwLockReadGuard;
use std::collections::HashMap;
/// Runtime entry point for throwing a Mu exception.
///
/// Stores `exception_obj` on the current thread, then walks the native stack
/// by following the saved-RBP chain, collecting the spill locations of
/// callee-saved registers from each frame, and compares each return address
/// against the frame's recorded exception callsites to find a catch block.
///
/// NOTE(review): the actual transfer of control on a match ("restore callee
/// saved register and jump to dest_addr") is not implemented yet — on a hit
/// the loop currently falls through and keeps unwinding, and it only stops by
/// panicking when a zero return address is reached.
#[no_mangle]
pub extern fn muentry_throw_exception(exception_obj: Address) {
    trace!("throwing exception: {}", exception_obj);
    let mut cur_thread = thread::MuThread::current_mut();
    // set exception object on the thread so the handler can retrieve it
    cur_thread.exception_obj = exception_obj;
    // all compiled functions, keyed by function id; needed for frame lookups
    let cf_lock = cur_thread.vm.compiled_funcs().read().unwrap();
    // rbp of the current frame (muentry_throw_exception itself, a Rust frame)
    let rust_frame_rbp = unsafe {thread::get_current_frame_rbp()};
    // x86-64 frame layout: [rbp] = saved caller rbp, [rbp + 8] = return address
    let rust_frame_return_addr = unsafe {rust_frame_rbp.plus(POINTER_SIZE).load::<Address>()};
    // the return address lies within the throwing frame's code
    let throw_frame_callsite = rust_frame_return_addr;
    let throw_func_id = find_func_for_address(&cf_lock, throw_frame_callsite);
    // skip to previous frame
    // this is the frame that throws the exception
    let rbp = unsafe {rust_frame_rbp.load::<Address>()};
    // set cursor to throwing frame
    let mut cursor = FrameCursor {
        rbp: rbp,
        return_addr: unsafe {rbp.plus(POINTER_SIZE).load::<Address>()},
        func_id: throw_func_id,
        callee_saved_locs: HashMap::new()
    };
    loop {
        // get return address (the slot above RBP slot)
        // let return_addr = unsafe {rbp.plus(POINTER_SIZE).load::<Address>()};
        // check if return_addr is valid
        // FIXME: should use a sentinel value here
        if cursor.return_addr.is_zero() {
            panic!("cannot find exception catch block, throws by {}", throw_func_id);
        }
        let callsite = cursor.return_addr;
        // frame info of the function the cursor currently points at
        let rwlock_cf = match cf_lock.get(&cursor.func_id) {
            Some(ret) => ret,
            None => panic!("cannot find compiled func with func_id {}, possibly didnt find the right frame for return address", cursor.func_id)
        };
        let rwlock_cf = rwlock_cf.read().unwrap();
        let ref frame = rwlock_cf.frame;
        // update callee saved register location
        for reg in x86_64::CALLEE_SAVED_GPRs.iter() {
            let reg_id = reg.id();
            if frame.allocated.contains_key(&reg_id) {
                // this frame spills the callee-saved reg at a fixed rbp offset
                let offset_from_rbp = frame.allocated.get(&reg_id).unwrap().offset;
                let reg_restore_addr = cursor.rbp.offset(offset_from_rbp);
                cursor.callee_saved_locs.insert(reg_id, reg_restore_addr);
            }
        }
        // advance the cursor to the caller before matching callsites below
        cursor.to_previous_frame(&cf_lock);
        // find exception block - comparing callsite with frame info
        let ref exception_callsites = frame.exception_callsites;
        for (possible_callsite, dest) in exception_callsites {
            let possible_callsite_addr = possible_callsite.to_address();
            if callsite == possible_callsite_addr {
                // found an exception block
                let dest_addr = dest.to_address();
                // restore callee saved register and jump to dest_addr
                // TODO(review): control transfer not implemented; see note above
            }
        }
        // keep unwinding
    }
}
/// Cursor for walking native stack frames during exception unwinding.
struct FrameCursor {
    // frame pointer (RBP) of the frame the cursor currently points at
    rbp: Address,
    // value of this frame's return-address slot ([rbp + 8])
    return_addr: Address,
    // id of the compiled function this frame belongs to
    func_id: MuID,
    // spill addresses of callee-saved registers seen so far, keyed by reg id
    callee_saved_locs: HashMap<MuID, Address>
}
impl FrameCursor {
    /// Moves the cursor one frame up the stack (to the caller), following the
    /// saved-RBP chain: [rbp] holds the previous frame's RBP and
    /// [prev_rbp + 8] that frame's return address.
    fn to_previous_frame(&mut self, cf: &RwLockReadGuard<HashMap<MuID, RwLock<CompiledFunction>>>) {
        let previous_rbp = unsafe {self.rbp.load::<Address>()};
        let previous_return_addr = unsafe {previous_rbp.plus(POINTER_SIZE).load::<Address>()};
        // the current return address points into the caller's code, so it
        // identifies the caller's function
        let previous_func_id = find_func_for_address(cf, self.return_addr);
        self.rbp = previous_rbp;
        self.return_addr = previous_return_addr;
        self.func_id = previous_func_id;
    }
}
/// Looks up which compiled function's machine code contains `pc_addr`.
/// TODO: stub — always panics via `unimplemented!()`; unwinding cannot
/// complete until this is implemented.
fn find_func_for_address (cf: &RwLockReadGuard<HashMap<MuID, RwLock<CompiledFunction>>>, pc_addr: Address) -> MuID {
    unimplemented!()
}
\ No newline at end of file
......@@ -16,6 +16,9 @@ use std::sync::Arc;
pub extern crate gc as mm;
pub mod thread;
pub mod entrypoints;
#[cfg(target_arch = "x86_64")]
#[path = "exception_x64.rs"]
pub mod exception;
// consider using libloading crate instead of the raw c functions for dynalic libraries
......@@ -42,7 +45,7 @@ pub fn resolve_symbol(symbol: String) -> Address {
use rustc_serialize::{Encodable, Encoder, Decodable, Decoder};
#[derive(Clone, Debug)]
#[derive(PartialEq, Eq, Hash, Clone, Debug)]
pub enum ValueLocation {
Register(RegGroup, MuID), // 0
Constant(RegGroup, Word), // 1
......@@ -144,6 +147,16 @@ impl ValueLocation {
_ => unimplemented!()
}
}
/// Resolves this location to a concrete memory address.
///
/// # Panics
/// Panics for `Register` and `Constant` locations, which have no address.
pub fn to_address(&self) -> Address {
    match self {
        &ValueLocation::Register(_, _)
        | &ValueLocation::Constant(_, _) => panic!("a register/constant cannot be turned into address"),
        // a direct location *is* the address
        &ValueLocation::Direct(_, addr) => addr,
        // an indirect location stores a pointer to the address
        &ValueLocation::Indirect(_, addr) => unsafe {addr.load::<Address>()},
        // a relocatable location names a symbol; resolve it at runtime
        &ValueLocation::Relocatable(_, ref symbol) => resolve_symbol(symbol.clone())
    }
}
}
pub const PRIMORDIAL_ENTRY : &'static str = "src/runtime/main.c";
......
......@@ -11,7 +11,7 @@ void set_thread_local(void* thread) {
mu_tls = thread;
}
void* get_thread_local() {
void* muentry_get_thread_local() {
printf("Thread%p: getting mu_tls as %p\n", pthread_self(), mu_tls);
return mu_tls;
}
......
......@@ -49,8 +49,8 @@ _swap_to_mu_stack:
# _swap_back_to_native_stack(sp_loc: Address)
# %rdi
.globl _swap_back_to_native_stack
_swap_back_to_native_stack:
.globl _muentry_swap_back_to_native_stack
_muentry_swap_back_to_native_stack:
movq 0(%rdi), %rsp
popq %r15
......@@ -61,3 +61,9 @@ _swap_back_to_native_stack:
popq %rbp
ret
# _get_current_frame_rbp() -> Address
# Leaf function with no prologue: %rbp is untouched here, so this returns
# the frame pointer of the *calling* frame in %rax.
.globl _get_current_frame_rbp
_get_current_frame_rbp:
    movq %rbp, %rax
    ret
......@@ -207,13 +207,13 @@ pub enum MuStackState {
pub struct MuThread {
pub hdr: MuEntityHeader,
allocator: mm::Mutator,
stack: Option<Box<MuStack>>,
pub stack: Option<Box<MuStack>>,
native_sp_loc: Address,
user_tls: Option<Address>,
vm: Arc<VM>,
exception_obj: Address
pub vm: Arc<VM>,
pub exception_obj: Address
}
// this depends on the layout of MuThread
......@@ -236,7 +236,7 @@ lazy_static! {
#[link(name = "runtime")]
extern "C" {
pub fn set_thread_local(thread: *mut MuThread);
pub fn get_thread_local() -> Address;
pub fn muentry_get_thread_local() -> Address;
}
#[cfg(target_arch = "x86_64")]
......@@ -244,7 +244,8 @@ extern "C" {
#[link(name = "swap_stack")]
extern "C" {
fn swap_to_mu_stack(new_sp: Address, entry: Address, old_sp_loc: Address);
fn swap_back_to_native_stack(sp_loc: Address);
fn muentry_swap_back_to_native_stack(sp_loc: Address);
pub fn get_current_frame_rbp() -> Address;
}
impl MuThread {
......@@ -260,15 +261,17 @@ impl MuThread {
}
}
#[inline(always)]
pub fn current() -> &'static MuThread {
unsafe{
get_thread_local().to_ptr::<MuThread>().as_ref().unwrap()
muentry_get_thread_local().to_ptr::<MuThread>().as_ref().unwrap()
}
}
#[inline(always)]
pub fn current_mut() -> &'static mut MuThread {
unsafe{
get_thread_local().to_ptr_mut::<MuThread>().as_mut().unwrap()
muentry_get_thread_local().to_ptr_mut::<MuThread>().as_mut().unwrap()
}
}
......@@ -300,7 +303,7 @@ impl MuThread {
// set thread local
unsafe {set_thread_local(muthread)};
let addr = unsafe {get_thread_local()};
let addr = unsafe {muentry_get_thread_local()};
let sp_threadlocal_loc = addr.plus(*NATIVE_SP_LOC_OFFSET);
debug!("new sp: 0x{:x}", new_sp);
......
#![allow(dead_code)]
use std::cmp;
use std::fmt;
use std::mem;
......@@ -11,13 +13,18 @@ impl Address {
/// Returns a new address `bytes` bytes above this one.
/// Uses plain `usize` addition, so it panics on overflow in debug builds.
pub fn plus(&self, bytes: usize) -> Self {
    Address(self.0 + bytes)
}
/// Returns a new address `bytes` bytes below this one.
/// Uses plain `usize` subtraction, so it panics on underflow in debug builds.
#[allow(dead_code)]
#[inline(always)]
pub fn sub(&self, bytes: usize) -> Self {
    Address(self.0 - bytes)
}
#[inline(always)]
pub fn offset<T>(&self, offset: isize) -> Self {
pub fn offset(&self, offset: isize) -> Self {
debug_assert!((self.0 as isize) < 0);
Address((self.0 as isize + offset) as usize)
}
/// Returns this address shifted by `offset` elements of type `T`,
/// i.e. by `offset * size_of::<T>()` bytes (signed arithmetic).
#[inline(always)]
pub fn shift<T>(&self, offset: isize) -> Self {
    // NOTE(review): this asserts the address, reinterpreted as isize, is
    // *negative* (top bit set). Ordinary user-space addresses would fail this
    // debug_assert — confirm the intended invariant (perhaps `>= 0`?).
    debug_assert!((self.0 as isize) < 0);
    Address((self.0 as isize + mem::size_of::<T>() as isize * offset) as usize)
}
#[inline(always)]
......
......@@ -317,7 +317,7 @@ pub fn factorial() -> VM {
args: vec![1],
convention: CallConvention::Mu
},
is_abort: true
is_abort: false
}
});
......
Markdown is supported
0%