Commit 9c0d8d0a authored by qinsoon

allow storing a funcref in the heap, and calling it via an SSA var

parent 95ee86b9
Pipeline #334 failed after 56 minutes and 28 seconds
@@ -1042,6 +1042,18 @@ impl Decodable for MuEntityHeader {
    }
}

+pub fn name_check(name: MuName) -> MuName {
+    let name = name.replace('.', "$");
+
+    if name.starts_with("@") || name.starts_with("%") {
+        let (_, name) = name.split_at(1);
+        return name.to_string();
+    }
+
+    name
+}
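
To make the mangling concrete: a minimal, runnable sketch of the same logic. `MuName` is assumed to alias `String` here, and the asserts are illustrative inputs, not taken from the test suite:

    // Assumption: MuName is an alias for String, as it appears to be in this codebase.
    type MuName = String;

    pub fn name_check(name: MuName) -> MuName {
        let name = name.replace('.', "$");
        if name.starts_with("@") || name.starts_with("%") {
            let (_, name) = name.split_at(1);
            return name.to_string();
        }
        name
    }

    fn main() {
        // '@' (global) and '%' (local) sigils from Mu IR's textual form are stripped,
        // and '.' becomes '$', presumably so the result is safe as an assembler symbol.
        assert_eq!(name_check("@my.func".to_string()), "my$func");
        assert_eq!(name_check("%local".to_string()), "local");
        assert_eq!(name_check("plain".to_string()), "plain");
    }
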
impl MuEntityHeader {
    pub fn unnamed(id: MuID) -> MuEntityHeader {
        MuEntityHeader {
@@ -1053,7 +1065,7 @@ impl MuEntityHeader {
    pub fn named(id: MuID, name: MuName) -> MuEntityHeader {
        MuEntityHeader {
            id: id,
-           name: RwLock::new(Some(MuEntityHeader::name_check(name)))
+           name: RwLock::new(Some(name_check(name)))
        }
    }
@@ -1067,19 +1079,7 @@ impl MuEntityHeader {
    pub fn set_name(&self, name: MuName) {
        let mut name_guard = self.name.write().unwrap();
-       *name_guard = Some(MuEntityHeader::name_check(name));
-   }
-
-   pub fn name_check(name: MuName) -> MuName {
-       let name = name.replace('.', "$");
-
-       if name.starts_with("@") || name.starts_with("%") {
-           let (_, name) = name.split_at(1);
-           return name.to_string();
-       }
-
-       name
+       *name_guard = Some(name_check(name));
    }

    fn abbreviate_name(&self) -> Option<MuName> {
......
@@ -1008,26 +1008,23 @@ impl ASMCodeGen {
    fn prepare_machine_regs(&self, regs: Iter<P<Value>>) -> Vec<MuID> {
        regs.map(|x| self.prepare_machine_reg(x)).collect()
    }

-   fn add_asm_call(&mut self, code: String, potentially_excepting: Option<MuName>) {
-       // a call instruction will use all the argument registers
-       // do not need
-       let uses : LinkedHashMap<MuID, Vec<ASMLocation>> = LinkedHashMap::new();
-       // for reg in x86_64::ARGUMENT_GPRs.iter() {
-       //     uses.insert(reg.id(), vec![]);
-       // }
-       // for reg in x86_64::ARGUMENT_FPRs.iter() {
-       //     uses.insert(reg.id(), vec![]);
-       // }
-
-       // defines: return registers
+   fn add_asm_call_with_extra_uses(&mut self,
+                                   code: String,
+                                   extra_uses: LinkedHashMap<MuID, Vec<ASMLocation>>,
+                                   potentially_excepting: Option<MuName>) {
+       let uses = extra_uses;
+
+       // defines
        let mut defines : LinkedHashMap<MuID, Vec<ASMLocation>> = LinkedHashMap::new();
        // return registers get defined
        for reg in x86_64::RETURN_GPRs.iter() {
            defines.insert(reg.id(), vec![]);
        }
        for reg in x86_64::RETURN_FPRs.iter() {
            defines.insert(reg.id(), vec![]);
        }
        // caller-saved registers will be destroyed
        for reg in x86_64::CALLER_SAVED_GPRs.iter() {
            if !defines.contains_key(&reg.id()) {
                defines.insert(reg.id(), vec![]);
@@ -1038,7 +1035,7 @@ impl ASMCodeGen {
                defines.insert(reg.id(), vec![]);
            }
        }
        self.add_asm_inst_internal(code, defines, uses, false, {
            if potentially_excepting.is_some() {
                ASMBranchTarget::PotentiallyExcepting(potentially_excepting.unwrap())
@@ -1048,6 +1045,10 @@ impl ASMCodeGen {
        }, None)
    }

+   fn add_asm_call(&mut self, code: String, potentially_excepting: Option<MuName>) {
+       self.add_asm_call_with_extra_uses(code, LinkedHashMap::new(), potentially_excepting);
+   }
+
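
The def/use bookkeeping above is what drives register allocation around calls: marking every return and caller-saved register as a def of the call kills their live ranges across it, so no value is kept in such a register over a call. Assuming Zebu's x86_64 register tables follow the System V AMD64 ABI (an assumption, not read off this diff), the sets involved would be roughly:

    // Illustrative only, under the System V AMD64 ABI assumption:
    //   RETURN_GPRs       = [RAX, RDX]
    //   RETURN_FPRs       = [XMM0, XMM1]
    //   CALLER_SAVED_GPRs = [RAX, RCX, RDX, RSI, RDI, R8, R9, R10, R11]
    // Treating all of these as defs of the call instruction tells the register
    // allocator that their old contents do not survive the call.
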
    fn add_asm_ret(&mut self, code: String) {
        // a return instruction does not use anything (not even RETURN REGS),
        // otherwise it would keep RETURN REGS alive
@@ -2839,7 +2840,18 @@ impl CodeGenerator for ASMCodeGen {
    fn emit_call_near_r64(&mut self, callsite: String, func: &P<Value>, pe: Option<MuName>) -> ValueLocation {
        trace!("emit: call {}", func);

-       unimplemented!()
+       let (reg, id, loc) = self.prepare_reg(func, 6);
+       let asm = format!("call *{}", reg);
+
+       self.add_asm_call_with_extra_uses(asm, linked_hashmap!{id => vec![loc]}, pe);
+
+       let callsite_symbol = symbol(callsite.clone());
+       self.add_asm_symbolic(directive_globl(callsite_symbol.clone()));
+       self.add_asm_symbolic(format!("{}:", callsite_symbol.clone()));
+
+       ValueLocation::Relocatable(RegGroup::GPR, callsite)
    }
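
Two details are worth noting here. `prepare_reg(func, 6)` records where the register operand will appear in the instruction string; 6 is the length of "call *", so the `ASMLocation` points at the register text itself (this reading of the second argument is inferred from the surrounding backend code, not stated in the diff). And passing `linked_hashmap!{id => vec![loc]}` as the extra uses is what keeps the funcref's register live up to the call. Illustratively, with invented register and label names:

    // Hypothetical emitted text for one such call site:
    //     call *%rax          # indirect call through the SSA variable's register
    //     .globl callsite_1   # the callsite label is exported...
    // callsite_1:             # ...and marks this call's return address

The returned `ValueLocation::Relocatable(RegGroup::GPR, callsite)` then lets later phases (presumably including exception-table emission for the potentially-excepting case) refer to the call site symbolically.
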
    fn emit_call_near_mem64(&mut self, callsite: String, func: &P<Value>, pe: Option<MuName>) -> ValueLocation {
......
@@ -35,7 +35,7 @@ extern "C" {
pub fn resolve_symbol(symbol: String) -> Address {
    use std::ptr;

-   let symbol = MuEntityHeader::name_check(symbol);
+   let symbol = name_check(symbol);

    let rtld_default = unsafe {dlopen(ptr::null(), 0)};
    let ret = unsafe {dlsym(rtld_default, CString::new(symbol.clone()).unwrap().as_ptr())};
......
@@ -45,7 +45,7 @@ pub enum APIHandleValue {
    Ref (P<MuType>, Address), // referenced type
    IRef(P<MuType>, Address),
    TagRef64(u64),
-   FuncRef,
+   FuncRef(MuID),
    ThreadRef,
    StackRef,
    FCRef, // frame cursor ref
@@ -62,7 +62,6 @@ pub enum APIHandleValue {
    // GenRef->IR->Child->Var->Global
    Global(MuID),
-   Func(MuID),
    ExpFunc,

    // GenRef->IR->Child->Var->Local
@@ -92,7 +91,7 @@ impl fmt::Debug for APIHandleValue {
            &Ref(ref ty, addr) => write!(f, "ref<{}> to {}", ty, addr),
            &IRef(ref ty, addr) => write!(f, "iref<{}> to {}", ty, addr),
            &TagRef64(val) => write!(f, "tagref64 0x{:x}", val),
-           &FuncRef => write!(f, "funcref"),
+           &FuncRef(id) => write!(f, "funcref to #{}", id),
            &ThreadRef => write!(f, "threadref"),
            &StackRef => write!(f, "stackref"),
            &FCRef => write!(f, "framecursorref"),
@@ -103,7 +102,6 @@ impl fmt::Debug for APIHandleValue {
            &BB => write!(f, "IR.BB"),
            &Inst => write!(f, "IR.inst"),
            &Global(id) => write!(f, "IR.global to #{}", id),
-           &Func(id) => write!(f, "IR.func to #{}", id),
            &ExpFunc => write!(f, "IR.expfunc"),
            &NorParam => write!(f, "IR.norparam"),
            &ExcParam => write!(f, "IR.excparam"),
@@ -180,9 +178,9 @@ impl APIHandleValue {
        }
    }

-   pub fn as_func(&self) -> MuID {
+   pub fn as_funcref(&self) -> MuID {
        match self {
-           &APIHandleValue::Func(id) => id,
+           &APIHandleValue::FuncRef(id) => id,
            _ => panic!("expected FuncRef")
        }
    }
......
@@ -32,7 +32,7 @@ use std::sync::atomic::{AtomicUsize, AtomicBool, ATOMIC_BOOL_INIT, ATOMIC_USIZE_
// possibly INTERNAL_ID in ir.rs, internal types, etc
pub struct VM {
-   // serialize
+   // ---serialize---
    // 0
    next_id: AtomicUsize,
    // 1
@@ -61,11 +61,20 @@ pub struct VM {
    // 12
    pub vm_options: VMOptions,

-   // partially serialize
+   // ---partially serialize---
    // 13
    compiled_funcs: RwLock<HashMap<MuID, RwLock<CompiledFunction>>>,

+   // ---do not serialize---
+   // the client may store a funcref to the heap so that it can be loaded and called later;
+   // however, the store may happen before we have an actual address for the function (in the AOT scenario)
+   aot_pending_funcref_store: RwLock<HashMap<Address, ValueLocation>>
}

+use std::u64;
+const PENDING_FUNCREF : u64 = u64::MAX;
+
const VM_SERIALIZE_FIELDS : usize = 14;
impl Encodable for VM {
@@ -371,7 +380,8 @@ impl Decodable for VM {
            primordial: RwLock::new(primordial),
            is_running: ATOMIC_BOOL_INIT,
            vm_options: vm_options,
-           compiled_funcs: RwLock::new(compiled_funcs)
+           compiled_funcs: RwLock::new(compiled_funcs),
+           aot_pending_funcref_store: RwLock::new(HashMap::new())
        };

        vm.next_id.store(next_id, Ordering::SeqCst);
@@ -431,7 +441,9 @@ impl <'a> VM {
            funcs: RwLock::new(HashMap::new()),
            compiled_funcs: RwLock::new(HashMap::new()),
-           primordial: RwLock::new(None)
+           primordial: RwLock::new(None),
+
+           aot_pending_funcref_store: RwLock::new(HashMap::new())
        };

        // insert all internal types
@@ -1023,7 +1035,7 @@ impl <'a> VM {
            panic!("Zebu does not support creating a primordial thread through a stack; name an entry function instead")
        } else {
            // extract func id
-           let func_id = primordial_func.unwrap().v.as_func();
+           let func_id = primordial_func.unwrap().v.as_funcref();

            // make primordial thread in vm
            self.make_primordial_thread(func_id, false, vec![]); // do not pass const args, use argc/argv
@@ -1034,7 +1046,7 @@ impl <'a> VM {
            let mut ret = hashmap!{};

            for i in 0..sym_fields.len() {
                let addr = sym_fields[i].v.as_address();
-               ret.insert(addr, sym_strings[i].clone());
+               ret.insert(addr, name_check(sym_strings[i].clone()));
            }

            ret
        };
@@ -1042,10 +1054,21 @@ impl <'a> VM {
        assert_eq!(reloc_fields.len(), reloc_strings.len());
        let fields = {
            let mut ret = hashmap!{};

+           // client-supplied relocation fields
            for i in 0..reloc_fields.len() {
                let addr = reloc_fields[i].v.as_address();
-               ret.insert(addr, reloc_strings[i].clone());
+               ret.insert(addr, name_check(reloc_strings[i].clone()));
            }

+           // pending funcrefs - replace them with symbols
+           {
+               let mut pending_funcref = self.aot_pending_funcref_store.write().unwrap();
+               for (addr, vl) in pending_funcref.drain() {
+                   ret.insert(addr, name_check(vl.to_relocatable()));
+               }
+           }
+
            ret
        };
@@ -1144,7 +1167,7 @@ impl <'a> VM {
                    v : APIHandleValue::IRef(inner_ty, addr)
                })
            },
-           APIHandleValue::FuncRef => unimplemented!(),
+           APIHandleValue::FuncRef(_) => unimplemented!(),
            _ => panic!("unexpected operand for refcast: {:?}", from_op)
        }
@@ -1320,13 +1343,29 @@ impl <'a> VM {
                APIHandleValue::Ref(_, aval)
                | APIHandleValue::IRef(_, aval) => addr.store::<Address>(aval),
-               _ => unimplemented!()
+
+               // if we are JITing, we can store the address of the function;
+               // but if we are doing AOT, we defer the store and resolve it when making the boot image
+               APIHandleValue::FuncRef(id) => self.store_funcref(addr, id),
+
+               _ => panic!("unimplemented store for handle {}", val.v)
            }
        }

        trace!("API: store value {:?} to location {:?}", val, loc);
    }

+   #[cfg(feature = "aot")]
+   fn store_funcref(&self, addr: Address, func_id: MuID) {
+       // put a pending-funcref sentinel at the address
+       unsafe {addr.store::<u64>(PENDING_FUNCREF)};
+
+       // and record this funcref for resolution at boot-image time
+       let symbol = self.name_of(func_id);
+       let mut pending_funcref_guard = self.aot_pending_funcref_store.write().unwrap();
+       pending_funcref_guard.insert(addr, ValueLocation::Relocatable(backend::RegGroup::GPR, symbol));
+   }
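
Putting the two halves together: `store_funcref` writes the `PENDING_FUNCREF` sentinel (`u64::MAX`) into the slot and records the slot's address against the function's symbol; `make_boot_image` (above) later drains `aot_pending_funcref_store` and turns each entry into a relocation, so the AOT linker patches the slot with the function's real address. A minimal self-contained sketch of the idea, using `usize` slot indices and plain strings in place of `Address` and `ValueLocation` (all names here are hypothetical, not Zebu's API):

    use std::collections::HashMap;

    const PENDING_FUNCREF: u64 = u64::MAX;

    // Toy model: "heap" slots that may hold a funcref, plus the pending map.
    struct ToyVm {
        heap: Vec<u64>,
        pending: HashMap<usize, String>, // slot index -> symbol to relocate against
    }

    impl ToyVm {
        // Phase 1: the client stores a funcref before the function has an address.
        fn store_funcref(&mut self, slot: usize, symbol: &str) {
            self.heap[slot] = PENDING_FUNCREF;            // sentinel in the slot
            self.pending.insert(slot, symbol.to_string()); // remember for later
        }

        // Phase 2: at boot-image time, drain the map into relocation entries
        // (slot -> symbol); the real VM hands these to the AOT linker.
        fn drain_relocations(&mut self) -> HashMap<usize, String> {
            self.pending.drain().collect()
        }
    }

    fn main() {
        let mut vm = ToyVm { heap: vec![0; 4], pending: HashMap::new() };
        vm.store_funcref(2, "ret42");

        assert_eq!(vm.heap[2], PENDING_FUNCREF);
        assert_eq!(vm.drain_relocations().get(&2).map(String::as_str), Some("ret42"));
    }

Using `u64::MAX` rather than zero presumably makes an unresolved slot easy to recognize if it is ever read before relocation.
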
    // this function and the following two assume that the GC will not move objects;
    // they need to be reimplemented if we have a moving GC
    pub fn handle_pin_object(&self, loc: APIHandleArg) -> APIHandleResult {
@@ -1363,7 +1402,7 @@ impl <'a> VM {
        self.new_handle(APIHandle {
            id: handle_id,
-           v : APIHandleValue::Func(id)
+           v : APIHandleValue::FuncRef(id)
        })
    }
......
#[macro_use]
extern crate mu;
#[macro_use]
extern crate utils;
#[macro_use]
extern crate log;
#[macro_use]
extern crate maplit;
#[macro_use]
......
@@ -637,4 +637,141 @@ fn persist_hybrid(vm: &VM) {
    define_func_ver! ((vm) persist_hybrid_v1 (entry: blk_entry) {
        blk_entry, blk_loop_head, blk_loop_body, blk_exit
    });
}
+
+#[test]
+fn test_persist_funcref() {
+    VM::start_logging_trace();
+
+    let vm = Arc::new(VM::new_with_opts("init_mu --disable-inline"));
+
+    unsafe {
+        MuThread::current_thread_as_mu_thread(Address::zero(), vm.clone());
+    }
+
+    persist_funcref(&vm);
+
+    let compiler = Compiler::new(CompilerPolicy::default(), &vm);
+
+    let func_ret42_id = vm.id_of("ret42");
+    {
+        let funcs = vm.funcs().read().unwrap();
+        let func = funcs.get(&func_ret42_id).unwrap().read().unwrap();
+        let func_vers = vm.func_vers().read().unwrap();
+        let mut func_ver = func_vers.get(&func.cur_ver.unwrap()).unwrap().write().unwrap();
+
+        compiler.compile(&mut func_ver);
+    }
+
+    let func_my_main_id = vm.id_of("my_main");
+    {
+        let funcs = vm.funcs().read().unwrap();
+        let func = funcs.get(&func_my_main_id).unwrap().read().unwrap();
+        let func_vers = vm.func_vers().read().unwrap();
+        let mut func_ver = func_vers.get(&func.cur_ver.unwrap()).unwrap().write().unwrap();
+
+        compiler.compile(&mut func_ver);
+    }
+
+    // store a funcref to ret42 in the global
+    {
+        let global_id = vm.id_of("my_global");
+        let global_handle = vm.handle_from_global(global_id);
+
+        let func_ret42_handle = vm.handle_from_func(func_ret42_id);
+
+        debug!("write {:?} to location {:?}", func_ret42_handle, global_handle);
+        vm.handle_store(MemoryOrder::Relaxed, &global_handle, &func_ret42_handle);
+    }
+
+    let my_main_handle = vm.handle_from_func(func_my_main_id);
+
+    // make boot image
+    vm.make_boot_image(
+        vec![func_ret42_id, func_my_main_id], // whitelist
+        Some(&my_main_handle), None,          // primordial func, stack
+        None,                                 // threadlocal
+        vec![], vec![],                       // sym fields/strings
+        vec![], vec![],                       // reloc fields/strings
+        "test_persist_funcref".to_string()
+    );
+
+    // link
+    let executable = {
+        use std::path;
+        let mut path = path::PathBuf::new();
+        path.push(&vm.vm_options.flag_aot_emit_dir);
+        path.push("test_persist_funcref");
+        path
+    };
+    let output = aot::execute_nocheck(executable);
+
+    assert!(output.status.code().is_some());
+
+    let ret_code = output.status.code().unwrap();
+    println!("return code: {}", ret_code);
+
+    // my_main should exit with ret42's return value
+    assert!(ret_code == 42);
+}
+
+fn persist_funcref(vm: &VM) {
+    typedef!    ((vm) int64 = mu_int(64));
+    constdef!   ((vm) <int64> int64_42 = Constant::Int(42));
+
+    funcsig!    ((vm) ret42_sig = () -> (int64));
+    funcdecl!   ((vm) <ret42_sig> ret42);
+    funcdef!    ((vm) <ret42_sig> ret42 VERSION ret42_v1);
+
+    typedef!    ((vm) funcref_to_ret42 = mu_funcref(ret42_sig));
+    globaldef!  ((vm) <funcref_to_ret42> my_global);
+
+    // ---ret42---
+    {
+        // blk entry
+        block!      ((vm, ret42_v1) blk_entry);
+        consta!     ((vm, ret42_v1) int64_42_local = int64_42);
+        inst!       ((vm, ret42_v1) blk_entry_ret:
+            RET (int64_42_local)
+        );
+
+        define_block!((vm, ret42_v1) blk_entry() {
+            blk_entry_ret
+        });
+
+        define_func_ver!((vm) ret42_v1 (entry: blk_entry) {blk_entry});
+    }
+
+    // ---my_main---
+    {
+        funcsig!    ((vm) my_main_sig = () -> ());
+        funcdecl!   ((vm) <my_main_sig> my_main);
+        funcdef!    ((vm) <my_main_sig> my_main VERSION my_main_v1);
+
+        // blk entry
+        block!      ((vm, my_main_v1) blk_entry);
+
+        global!     ((vm, my_main_v1) blk_entry_global = my_global);
+        ssa!        ((vm, my_main_v1) <funcref_to_ret42> func);
+        inst!       ((vm, my_main_v1) blk_entry_load:
+            func = LOAD blk_entry_global (is_ptr: false, order: MemoryOrder::SeqCst)
+        );
+
+        ssa!        ((vm, my_main_v1) <int64> blk_entry_res);
+        inst!       ((vm, my_main_v1) blk_entry_call:
+            blk_entry_res = EXPRCALL (CallConvention::Mu, is_abort: false) func ()
+        );
+
+        let blk_entry_exit = gen_ccall_exit(blk_entry_res.clone(), &mut my_main_v1, &vm);
+
+        inst!       ((vm, my_main_v1) blk_entry_ret:
+            RET
+        );
+
+        define_block! ((vm, my_main_v1) blk_entry() {
+            blk_entry_load,
+            blk_entry_call,
+            blk_entry_exit,
+            blk_entry_ret
+        });
+
+        define_func_ver!((vm) my_main_v1 (entry: blk_entry) {
+            blk_entry
+        });
+    }
+}
\ No newline at end of file
@@ -1148,8 +1148,8 @@ def test_linked_list():
    assert res.returncode == 0, res.err
    assert res.out == '1\n'

+@pytest.mark.xfail(reason='unimplemented memory order in API store')
@may_spawn_proc
-@pytest.mark.xfail(reason='segment fault')
def test_rpytarget_richards():
    from rpython.translator.goal.richards import entry_point
    def main(argv):
......