Commit ff556adc authored by qinsoon

[wip] working on using globals through the API. Make sure bulk defining and
single defining have the same behavior.
parent a540f43e
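
Note on the overall pattern in this commit: before it, the single-entity declare_* methods and the bulk-define path each inserted into the VM's maps themselves, so invariant checks and logging could diverge between the two. The commit funnels both paths through shared *_internal helpers that operate on an already-held write guard. A minimal self-contained sketch of that pattern, assuming hypothetical stand-in types rather than Zebu's actual definitions:

```rust
use std::collections::HashMap;
use std::sync::{RwLock, RwLockWriteGuard};

struct Vm {
    constants: RwLock<HashMap<u64, String>>,
}

impl Vm {
    /// single definition: take the lock, then defer to the shared helper
    fn declare_const(&self, id: u64, val: String) {
        let mut guard = self.constants.write().unwrap();
        self.declare_const_internal(&mut guard, id, val);
    }

    /// bulk definition: one lock acquisition for the whole batch,
    /// but the same checks and logging as the single path
    fn declare_consts_bulk(&self, mut batch: HashMap<u64, String>) {
        let mut guard = self.constants.write().unwrap();
        for (id, val) in batch.drain() {
            self.declare_const_internal(&mut guard, id, val);
        }
    }

    /// the one place where the insert, its invariant check,
    /// and its logging live
    fn declare_const_internal(
        &self,
        map: &mut RwLockWriteGuard<HashMap<u64, String>>,
        id: u64,
        val: String,
    ) {
        debug_assert!(!map.contains_key(&id));
        map.insert(id, val);
    }
}

fn main() {
    let vm = Vm { constants: RwLock::new(HashMap::new()) };
    vm.declare_const(1, "one".to_string());
    vm.declare_consts_bulk(HashMap::from([(2, "two".to_string())]));
    assert_eq!(vm.constants.read().unwrap().len(), 2);
}
```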
@@ -9,6 +9,7 @@ use ast::ptr::*;
 use ast::types::*;
 use utils::Address;
 use compiler::backend::RegGroup;
+use compiler::backend::BackendTypeInfo;
 use vm::VM;
 use runtime::ValueLocation;
 use runtime::thread::MuThread;
@@ -31,8 +32,7 @@ fn allocate(size: ByteSize, align: ByteSize, encode: u64, hybrid_len: Option<u64
     ret
 }
 
-pub fn allocate_fixed(ty: P<MuType>, vm: &VM) -> Address {
-    let backendtype = vm.get_backend_type_info(ty.id());
+pub fn allocate_fixed(ty: P<MuType>, backendtype: Box<BackendTypeInfo>) -> Address {
     let gctype = backendtype.gc_type.clone();
     let encode = get_gc_type_encode(gctype.id);
@@ -43,8 +43,7 @@ pub fn allocate_fixed(ty: P<MuType>, vm: &VM) -> Address {
     allocate(gctype.size(), gctype.alignment, encode, None).to_address()
 }
 
-pub fn allocate_hybrid(ty: P<MuType>, len: u64, vm: &VM) -> Address {
-    let backendtype = vm.get_backend_type_info((ty.id()));
+pub fn allocate_hybrid(ty: P<MuType>, len: u64, backendtype: Box<BackendTypeInfo>) -> Address {
     let gctype = backendtype.gc_type.clone();
     let encode = get_gc_type_encode(gctype.id);
@@ -55,12 +54,12 @@ pub fn allocate_hybrid(ty: P<MuType>, len: u64, vm: &VM) -> Address {
     allocate(gctype.size_hybrid(len as u32), gctype.alignment, encode, Some(len)).to_address()
 }
 
-pub fn allocate_global(iref_global: P<Value>, vm: &VM) -> ValueLocation {
+pub fn allocate_global(iref_global: P<Value>, backendtype: Box<BackendTypeInfo>) -> ValueLocation {
     let referenced_type = match iref_global.ty.get_referenced_ty() {
         Some(ty) => ty,
         None => panic!("expected global to be an iref type, found {}", iref_global.ty)
     };
 
-    let addr = allocate_fixed(referenced_type, vm);
+    let addr = allocate_fixed(referenced_type, backendtype);
     ValueLocation::Direct(RegGroup::GPR, addr)
 }
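
The three signature changes above share one idea: gc::allocate_* used to take &VM and call vm.get_backend_type_info() itself, while now the caller resolves the BackendTypeInfo and passes it in, so the allocation path no longer re-enters the VM. That matters once callers hold VM locks, as in the bulk-define path later in this diff. A minimal sketch of the same inversion, with hypothetical stand-in types:

```rust
use std::collections::HashMap;
use std::sync::RwLock;

// hypothetical stand-in for Zebu's BackendTypeInfo
#[derive(Clone)]
struct BackendTypeInfo {
    size: usize,
    alignment: usize,
}

struct Vm {
    backend_types: RwLock<HashMap<u64, BackendTypeInfo>>,
}

impl Vm {
    // takes a read lock internally
    fn get_backend_type_info(&self, ty_id: u64) -> Box<BackendTypeInfo> {
        Box::new(self.backend_types.read().unwrap()[&ty_id].clone())
    }
}

// before: fn allocate_fixed(ty_id: u64, vm: &Vm) -> usize, which had to
// re-enter the VM; after: the allocator only sees the data it needs
fn allocate_fixed(backend_ty: Box<BackendTypeInfo>) -> usize {
    // stand-in for the real GC allocation: round the size up to alignment
    (backend_ty.size + backend_ty.alignment - 1) & !(backend_ty.alignment - 1)
}

fn main() {
    let vm = Vm {
        backend_types: RwLock::new(HashMap::from([(1, BackendTypeInfo { size: 20, alignment: 8 })])),
    };
    // the caller resolves the type info while no conflicting lock is held...
    let backend_ty = vm.get_backend_type_info(1);
    // ...and the allocation itself never touches the VM
    assert_eq!(allocate_fixed(backend_ty), 24);
}
```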
@@ -334,6 +334,20 @@ impl MuThread {
         // fake_swap_mu_thread(sp_threadlocal_loc);
         // }
     }
 
+    /// turn the current mu thread back into a normal thread
+    pub unsafe fn cleanup_current_mu_thread() {
+        let mu_thread_addr = unsafe {muentry_get_thread_local()};
+
+        if !mu_thread_addr.is_zero() {
+            let mu_thread : *mut MuThread = mu_thread_addr.to_ptr_mut();
+            mm::drop_mutator(&mut (*mu_thread).allocator as *mut mm::Mutator);
+
+            let mu_thread : Box<MuThread> = unsafe {Box::from_raw(mu_thread)};
+            // mu_thread is dropped here, freeing the boxed MuThread
+        }
+    }
+
     pub fn new_thread_normal(mut stack: Box<MuStack>, threadlocal: Address, vals: Vec<ValueLocation>, vm: Arc<VM>) -> JoinHandle<()> {
         // set up arguments on stack
...
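
cleanup_current_mu_thread above is the release half of a common unsafe idiom: a Box whose ownership was earlier handed off as a raw pointer (when the thread was registered as a Mu thread) is reconstituted with Box::from_raw, so the destructor and deallocation run exactly once. A self-contained sketch of that ownership round trip, using a hypothetical payload type:

```rust
struct ThreadLocalState {
    id: u64,
}

impl Drop for ThreadLocalState {
    fn drop(&mut self) {
        println!("dropping state for thread {}", self.id);
    }
}

fn main() {
    // registration: ownership leaves the Box as a raw pointer, as happens
    // when a MuThread is installed into thread-local storage
    let raw: *mut ThreadLocalState = Box::into_raw(Box::new(ThreadLocalState { id: 1 }));

    // ... the raw pointer lives in thread-local storage for a while ...

    // cleanup: reconstitute the Box; dropping it runs Drop and frees memory,
    // exactly once, which is what cleanup_current_mu_thread does for MuThread
    let state: Box<ThreadLocalState> = unsafe { Box::from_raw(raw) };
    drop(state);
}
```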
@@ -23,7 +23,7 @@ pub fn current_thread_as_mu_thread(threadlocal: Address, api_vm: *const api_c::C
         let hdr = ((*api_vm).header) as *const api_impl::MuVM;
         let vm = ((*hdr).vm).clone();
 
-        thread::MuThread::current_thread_as_mu_thread(threadlocal, vm)
+        thread::MuThread::current_thread_as_mu_thread(threadlocal, vm);
     }
 }
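
The single added semicolon above is a type-level change, not cosmetics: a Rust block's trailing expression is its value, so with the `;` the wrapper's body evaluates to `()` and the inner call's result is discarded. A tiny self-contained illustration (hypothetical functions, not Zebu's):

```rust
fn inner() -> bool {
    true
}

// with the semicolon the body evaluates to `()`; without it, this function
// would have to declare `-> bool` (or fail to type-check)
fn wrapper() {
    inner();
}

fn main() {
    wrapper();
}
```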
@@ -22,6 +22,7 @@ use rustc_serialize::{Encodable, Encoder, Decodable, Decoder};
 use log::LogLevel;
 use std::path;
 use std::sync::RwLock;
+use std::sync::RwLockWriteGuard;
 use std::sync::atomic::{AtomicUsize, AtomicBool, ATOMIC_BOOL_INIT, ATOMIC_USIZE_INIT, Ordering};
 
 // FIXME:
@@ -578,14 +579,19 @@ impl <'a> VM {
     pub fn declare_const(&self, id: MuID, ty: P<MuType>, val: Constant) -> P<Value> {
         let mut constants = self.constants.write().unwrap();
-        debug_assert!(!constants.contains_key(&id));
 
         let ret = P(Value{hdr: MuEntityHeader::unnamed(id), ty: ty, v: Value_::Constant(val)});
-        trace!("declare const #{} = {}", id, ret);
-        constants.insert(id, ret.clone());
+        self.declare_const_internal(&mut constants, id, ret.clone());
 
         ret
     }
 
+    fn declare_const_internal(&self, map: &mut RwLockWriteGuard<HashMap<MuID, P<Value>>>, id: MuID, val: P<Value>) {
+        debug_assert!(!map.contains_key(&id));
+
+        info!("declare const #{} = {}", id, val);
+        map.insert(id, val);
+    }
+
     pub fn get_const(&self, id: MuID) -> P<Value> {
         let const_lock = self.constants.read().unwrap();
@@ -603,29 +609,62 @@ impl <'a> VM {
         });
 
         let mut globals = self.globals.write().unwrap();
-        trace!("declare global #{} = {}", id, global);
-        globals.insert(id, global.clone());
-
-        // allocate global
-        let loc = gc::allocate_global(global.clone(), self);
         let mut global_locs = self.global_locations.write().unwrap();
-        trace!("allocate global #{} as {}", id, loc);
-        global_locs.insert(id, loc);
+        self.declare_global_internal(&mut globals, &mut global_locs, id, global.clone());
 
         global
     }
 
+    fn declare_global_internal(
+        &self,
+        globals: &mut RwLockWriteGuard<HashMap<MuID, P<Value>>>,
+        global_locs: &mut RwLockWriteGuard<HashMap<MuID, ValueLocation>>,
+        id: MuID, val: P<Value>
+    ) {
+        self.declare_global_internal_no_alloc(globals, id, val.clone());
+        self.alloc_global(global_locs, id, val);
+    }
+
+    // when bulk declaring, we hold locks for everything, so we cannot resolve the backend type and do the allocation
+    fn declare_global_internal_no_alloc(
+        &self,
+        globals: &mut RwLockWriteGuard<HashMap<MuID, P<Value>>>,
+        id: MuID, val: P<Value>
+    ) {
+        debug_assert!(!globals.contains_key(&id));
+
+        info!("declare global #{} = {}", id, val);
+        globals.insert(id, val.clone());
+    }
+
+    fn alloc_global(
+        &self,
+        global_locs: &mut RwLockWriteGuard<HashMap<MuID, ValueLocation>>,
+        id: MuID, val: P<Value>
+    ) {
+        let backend_ty = self.get_backend_type_info(val.ty.get_referenced_ty().unwrap().id());
+        let loc = gc::allocate_global(val, backend_ty);
+
+        info!("allocate global #{} as {}", id, loc);
+        global_locs.insert(id, loc);
+    }
+
     pub fn declare_type(&self, id: MuID, ty: MuType_) -> P<MuType> {
         let ty = P(MuType{hdr: MuEntityHeader::unnamed(id), v: ty});
 
         let mut types = self.types.write().unwrap();
-        debug_assert!(!types.contains_key(&id));
-
-        trace!("declare type #{} = {}", ty.id(), ty);
-        types.insert(ty.id(), ty.clone());
+        self.declare_type_internal(&mut types, id, ty.clone());
 
         ty
     }
 
+    fn declare_type_internal(&self, types: &mut RwLockWriteGuard<HashMap<MuID, P<MuType>>>, id: MuID, ty: P<MuType>) {
+        debug_assert!(!types.contains_key(&id));
+
+        info!("declare type #{} = {}", id, ty);
+        types.insert(id, ty.clone());
+    }
+
     pub fn get_type(&self, id: MuID) -> P<MuType> {
         let type_lock = self.types.read().unwrap();
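
alloc_global above (and allocate_global in the GC) lean on the invariant that a global cell has an iref type: the allocation is sized for the referent, and a non-iref global is a hard error. A minimal sketch of that unwrapping, with a hypothetical two-variant type enum standing in for MuType:

```rust
// hypothetical miniature of MuType: just enough to show get_referenced_ty
#[derive(Debug, Clone, PartialEq)]
enum Ty {
    Int(usize),    // an n-bit integer type
    IRef(Box<Ty>), // an internal reference to another type
}

impl Ty {
    fn get_referenced_ty(&self) -> Option<Ty> {
        match self {
            Ty::IRef(inner) => Some((**inner).clone()),
            _ => None,
        }
    }
}

fn main() {
    let global_cell_ty = Ty::IRef(Box::new(Ty::Int(64)));

    // what alloc_global does: unwrap the iref or fail loudly
    let referent = global_cell_ty
        .get_referenced_ty()
        .expect("expected global to be an iref type");

    // the allocation is then sized for the referent (an int<64> here),
    // not for the iref cell itself
    assert_eq!(referent, Ty::Int(64));
}
```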
@@ -636,14 +675,20 @@ impl <'a> VM {
     }
 
     pub fn declare_func_sig(&self, id: MuID, ret_tys: Vec<P<MuType>>, arg_tys: Vec<P<MuType>>) -> P<MuFuncSig> {
-        let mut func_sigs = self.func_sigs.write().unwrap();
-        debug_assert!(!func_sigs.contains_key(&id));
-
         let ret = P(MuFuncSig{hdr: MuEntityHeader::unnamed(id), ret_tys: ret_tys, arg_tys: arg_tys});
-        func_sigs.insert(id, ret.clone());
+
+        let mut func_sigs = self.func_sigs.write().unwrap();
+        self.declare_func_sig_internal(&mut func_sigs, id, ret.clone());
 
         ret
     }
 
+    fn declare_func_sig_internal(&self, sigs: &mut RwLockWriteGuard<HashMap<MuID, P<MuFuncSig>>>, id: MuID, sig: P<MuFuncSig>) {
+        debug_assert!(!sigs.contains_key(&id));
+
+        info!("declare func sig #{} = {}", id, sig);
+        sigs.insert(id, sig);
+    }
+
     pub fn get_func_sig(&self, id: MuID) -> P<MuFuncSig> {
         let func_sig_lock = self.func_sigs.read().unwrap();
@@ -654,9 +699,16 @@ impl <'a> VM {
     }
 
     pub fn declare_func (&self, func: MuFunction) {
-        info!("declare function {}", func);
         let mut funcs = self.funcs.write().unwrap();
-        funcs.insert(func.id(), RwLock::new(func));
+        self.declare_func_internal(&mut funcs, func.id(), func);
     }
 
+    fn declare_func_internal(&self, funcs: &mut RwLockWriteGuard<HashMap<MuID, RwLock<MuFunction>>>, id: MuID, func: MuFunction) {
+        debug_assert!(!funcs.contains_key(&id));
+
+        info!("declare func #{} = {}", id, func);
+        funcs.insert(id, RwLock::new(func));
+    }
+
     /// The IR builder needs to look-up the function signature from the existing function ID.
@@ -709,53 +761,68 @@ impl <'a> VM {
     ) {
         // Make sure other components, if ever acquiring multiple locks at the same time, acquire
         // them in this order, to prevent deadlock.
-        let mut id_name_map = self.id_name_map.write().unwrap();
-        let mut name_id_map = self.name_id_map.write().unwrap();
-        let mut types = self.types.write().unwrap();
-        let mut constants = self.constants.write().unwrap();
-        let mut globals = self.globals.write().unwrap();
-        let mut func_sigs = self.func_sigs.write().unwrap();
-        let mut funcs = self.funcs.write().unwrap();
-        let mut func_vers = self.func_vers.write().unwrap();
+        {
+            let mut id_name_map = self.id_name_map.write().unwrap();
+            let mut name_id_map = self.name_id_map.write().unwrap();
+            let mut types = self.types.write().unwrap();
+            let mut constants = self.constants.write().unwrap();
+            let mut globals = self.globals.write().unwrap();
+            let mut func_sigs = self.func_sigs.write().unwrap();
+            let mut funcs = self.funcs.write().unwrap();
+            let mut func_vers = self.func_vers.write().unwrap();
 
-        for (id, name) in new_id_name_map.drain() {
-            id_name_map.insert(id, name.clone());
-            name_id_map.insert(name, id);
-        }
+            for (id, name) in new_id_name_map.drain() {
+                id_name_map.insert(id, name.clone());
+                name_id_map.insert(name, id);
+            }
 
-        for (id, obj) in new_types.drain() {
-            types.insert(id, obj);
-        }
+            for (id, obj) in new_types.drain() {
+                self.declare_type_internal(&mut types, id, obj);
+            }
 
-        for (id, obj) in new_constants.drain() {
-            constants.insert(id, obj);
-        }
+            for (id, obj) in new_constants.drain() {
+                self.declare_const_internal(&mut constants, id, obj);
+            }
 
-        for (id, obj) in new_globals.drain() {
-            globals.insert(id, obj);
-        }
+            for (id, obj) in new_globals.drain() {
+                // we bulk allocate later (since we are holding all the locks, we cannot find ty info)
+                self.declare_global_internal_no_alloc(&mut globals, id, obj);
+            }
 
-        for (id, obj) in new_func_sigs.drain() {
-            func_sigs.insert(id, obj);
-        }
+            for (id, obj) in new_func_sigs.drain() {
+                self.declare_func_sig_internal(&mut func_sigs, id, obj);
+            }
 
-        for (id, obj) in new_funcs.drain() {
-            funcs.insert(id, RwLock::new(*obj));
-        }
+            for (id, obj) in new_funcs.drain() {
+                self.declare_func_internal(&mut funcs, id, *obj);
+            }
 
-        for (id, obj) in new_func_vers.drain() {
-            let func_id = obj.func_id;
-            func_vers.insert(id, RwLock::new(*obj));
+            for (id, obj) in new_func_vers.drain() {
+                let func_id = obj.func_id;
+                func_vers.insert(id, RwLock::new(*obj));
 
-            {
-                trace!("Adding funcver {} as a version of {}...", id, func_id);
-                let func = funcs.get_mut(&func_id).unwrap();
-                func.write().unwrap().new_version(id);
-                trace!("Added funcver {} as a version of {} {:?}.", id, func_id, func);
+                {
+                    trace!("Adding funcver {} as a version of {}...", id, func_id);
+                    let func = funcs.get_mut(&func_id).unwrap();
+                    func.write().unwrap().new_version(id);
+                    trace!("Added funcver {} as a version of {} {:?}.", id, func_id, func);
+                }
             }
         }
+        // Locks released here
+
+        // allocate all the globals defined
+        {
+            let globals = self.globals.read().unwrap();
+            let mut global_locs = self.global_locations.write().unwrap();
+
+            // make sure current thread has allocator
+            // MuThread::current_thread_as_mu_thread(unsafe {Address::zero()}, self);
+
+            for (id, global) in globals.iter() {
+                self.alloc_global(&mut global_locs, *id, global.clone());
+            }
+        }
     }
 
     pub fn add_compiled_func (&self, func: CompiledFunction) {
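
The restructuring above is a two-phase scheme: phase one takes every write lock in the canonical order and only records the declarations (no allocation, since resolving backend type info would need locks already held, and Rust's RwLock is not reentrant); phase two runs after the scope ends, when all guards have been dropped, and performs the global allocations. A self-contained sketch of that shape, with hypothetical maps standing in for the VM's:

```rust
use std::collections::HashMap;
use std::sync::RwLock;

// hypothetical stand-ins: `types` plays the lock that resolving backend type
// info would need; `globals` and `locations` mirror the VM's maps
struct Vm {
    types: RwLock<HashMap<u64, usize>>,     // type id -> size
    globals: RwLock<HashMap<u64, u64>>,     // global id -> type id
    locations: RwLock<HashMap<u64, usize>>, // global id -> "address"
}

impl Vm {
    fn bulk_define(&self, new_globals: Vec<(u64, u64)>) {
        // phase 1: hold the write locks, only record declarations; looking up
        // `types` here would deadlock, because RwLock is not reentrant
        {
            let _types = self.types.write().unwrap();
            let mut globals = self.globals.write().unwrap();
            for (id, ty) in new_globals {
                globals.insert(id, ty);
            }
        } // all guards dropped here

        // phase 2: locks released; now sizes can be resolved and "allocated"
        let globals = self.globals.read().unwrap();
        let mut locations = self.locations.write().unwrap();
        for (id, ty) in globals.iter() {
            let size = self.types.read().unwrap()[ty];
            locations.insert(*id, size); // pretend the size is an address
        }
    }
}

fn main() {
    let vm = Vm {
        types: RwLock::new(HashMap::from([(7, 16)])),
        globals: RwLock::new(HashMap::new()),
        locations: RwLock::new(HashMap::new()),
    };
    vm.bulk_define(vec![(100, 7)]);
    assert_eq!(vm.locations.read().unwrap()[&100], 16);
}
```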
@@ -867,12 +934,10 @@ impl <'a> VM {
     }
 
     pub fn new_fixed(&self, tyid: MuID) -> APIHandleResult {
-        let ty = {
-            let types_lock = self.types.read().unwrap();
-            types_lock.get(&tyid).unwrap().clone()
-        };
+        let ty = self.get_type(tyid);
 
-        let addr = gc::allocate_fixed(ty.clone(), self);
+        let backend_ty = self.get_backend_type_info(tyid);
+        let addr = gc::allocate_fixed(ty.clone(), backend_ty);
         trace!("API: allocated fixed type {} at {}", ty, addr);
 
         self.new_handle(APIHandle {
@@ -885,7 +950,8 @@ impl <'a> VM {
         let ty = self.get_type(tyid);
         let len = self.handle_to_uint64(length);
 
-        let addr = gc::allocate_hybrid(ty.clone(), len, self);
+        let backend_ty = self.get_backend_type_info(tyid);
+        let addr = gc::allocate_hybrid(ty.clone(), len, backend_ty);
         trace!("API: allocated hybrid type {} of length {} at {}", ty, len, addr);
 
         self.new_handle(APIHandle {
...