Commit 8da4a884 authored by qinsoon's avatar qinsoon

refactor hybrid. implemented GetFieldIRef

Note: marked two places in the api implementation as unimplemented, as
the instruction/type changed.
parent 9938b350
......@@ -176,7 +176,7 @@ pub enum Instruction_ {
GetFieldIRef{
is_ptr: bool,
base: OpIndex, // iref or uptr
index: OpIndex // constant
index: usize // constant
},
// iref|uptr<array<T N>> int<M> -> iref|uptr<T>
......@@ -306,7 +306,7 @@ impl Instruction_ {
&Instruction_::GetIRef(reference) => format!("GETIREF {}", ops[reference]),
&Instruction_::GetFieldIRef{is_ptr, base, index} => {
let ptr = select_value!(is_ptr, "PTR", "");
format!("GETFIELDIREF {} {} {}", ptr, ops[base], ops[index])
format!("GETFIELDIREF {} {} {}", ptr, ops[base], index)
},
&Instruction_::GetElementIRef{is_ptr, base, index} => {
let ptr = select_value!(is_ptr, "PTR", "");
......
......@@ -79,7 +79,7 @@ pub fn pick_op_code_for_ssa(ty: &P<MuType>) -> OpCode {
// we are not supposed to have these as SSA
Struct(_)
| Array(_, _)
| Hybrid(_, _)
| Hybrid(_)
| Void => panic!("Not expecting {} as SSA", ty),
// unimplemented
Vector(_, _) => unimplemented!()
......@@ -107,7 +107,7 @@ pub fn pick_op_code_for_value(ty: &P<MuType>) -> OpCode {
// we are not supposed to have these as SSA
Struct(_)
| Array(_, _)
| Hybrid(_, _)
| Hybrid(_)
| Void => unimplemented!(),
// unimplemented
Vector(_, _) => unimplemented!()
......
......@@ -32,6 +32,10 @@ lazy_static! {
pub static ref DOUBLE_TYPE : P<MuType> = P(
MuType::new(new_internal_id(), MuType_::double())
);
pub static ref VOID_TYPE : P<MuType> = P(
MuType::new(new_internal_id(), MuType_::void())
);
pub static ref INTERNAL_TYPES : Vec<P<MuType>> = vec![
ADDRESS_TYPE.clone(),
......@@ -58,6 +62,8 @@ impl MuType {
}
}
pub type StructTag = MuName;
pub type HybridTag = MuName;
#[derive(PartialEq, Debug, RustcEncodable, RustcDecodable)]
pub enum MuType_ {
/// int <length>
......@@ -78,13 +84,13 @@ pub enum MuType_ {
UPtr (P<MuType>),
/// struct<T1 T2 ...>
Struct (MuName),
Struct (StructTag),
/// array<T length>
Array (P<MuType>, usize),
/// hybrid<F1 F2 ... V>: a hybrid of fixed length parts and a variable length part
Hybrid (Vec<P<MuType>>, P<MuType>),
Hybrid (HybridTag),
/// void
Void,
......@@ -124,7 +130,6 @@ impl fmt::Display for MuType_ {
&MuType_::WeakRef(ref ty) => write!(f, "weakref<{}>", ty),
&MuType_::UPtr(ref ty) => write!(f, "uptr<{}>", ty),
&MuType_::Array(ref ty, size) => write!(f, "array<{} {}>", ty, size),
&MuType_::Hybrid(ref fix_tys, ref var_ty) => write!(f, "hybrid<[{}] {}>", vec_utils::as_str(fix_tys), var_ty),
&MuType_::Void => write!(f, "void"),
&MuType_::ThreadRef => write!(f, "threadref"),
&MuType_::StackRef => write!(f, "stackref"),
......@@ -132,14 +137,17 @@ impl fmt::Display for MuType_ {
&MuType_::Vector(ref ty, size) => write!(f, "vector<{} {}>", ty, size),
&MuType_::FuncRef(ref sig) => write!(f, "funcref<{}>", sig),
&MuType_::UFuncPtr(ref sig) => write!(f, "ufuncref<{}>", sig),
&MuType_::Struct(ref tag) => write!(f, "{}(struct)", tag)
&MuType_::Struct(ref tag) => write!(f, "{}(struct)", tag),
&MuType_::Hybrid(ref tag) => write!(f, "{}(hybrid)", tag)
}
}
}
lazy_static! {
/// storing a map from MuName to StructType_
pub static ref STRUCT_TAG_MAP : RwLock<HashMap<MuName, StructType_>> = RwLock::new(HashMap::new());
pub static ref STRUCT_TAG_MAP : RwLock<HashMap<StructTag, StructType_>> = RwLock::new(HashMap::new());
/// storing a map from MuName to HybridType_
pub static ref HYBRID_TAG_MAP : RwLock<HashMap<HybridTag, HybridType_>> = RwLock::new(HashMap::new());
}
#[derive(PartialEq, Debug, RustcEncodable, RustcDecodable)]
......@@ -176,6 +184,47 @@ impl StructType_ {
}
}
#[derive(PartialEq, Debug, RustcEncodable, RustcDecodable)]
/// Backing definition for a hybrid type: a fixed-length prefix of fields
/// followed by a variable-length tail. Stored in HYBRID_TAG_MAP keyed by
/// HybridTag, so MuType_::Hybrid only needs to carry the tag.
pub struct HybridType_ {
// types of the fixed-length fields, in declaration order
fix_tys: Vec<P<MuType>>,
// element type of the variable-length part
var_ty : P<MuType>
}
impl HybridType_ {
    /// Creates a hybrid type descriptor from the types of its fixed-length
    /// fields and the element type of its variable-length tail.
    pub fn new(fix_tys: Vec<P<MuType>>, var_ty: P<MuType>) -> HybridType_ {
        HybridType_ {fix_tys: fix_tys, var_ty: var_ty}
    }

    /// Replaces both the fixed-part field types and the variable-part type.
    pub fn set_tys(&mut self, fix_tys: Vec<P<MuType>>, var_ty: P<MuType>) {
        // Move the new Vec in directly instead of the previous
        // clear()-then-append() dance; same observable result.
        self.fix_tys = fix_tys;
        self.var_ty = var_ty;
    }

    /// Returns the fixed-part field types, in declaration order.
    pub fn get_fix_tys(&self) -> &Vec<P<MuType>> {
        &self.fix_tys
    }

    /// Returns the element type of the variable-length tail.
    pub fn get_var_ty(&self) -> &P<MuType> {
        &self.var_ty
    }
}
impl fmt::Display for HybridType_ {
    /// Formats as `hybrid<F1 F2 ...|V>`: fixed-part types separated by
    /// single spaces, then `|`, then the variable-part type.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        // Propagate formatter errors with `?` instead of unwrap()ing every
        // write! — panicking on a formatter error violates the fmt contract.
        write!(f, "hybrid<")?;
        for (i, ty) in self.fix_tys.iter().enumerate() {
            if i > 0 {
                write!(f, " ")?;
            }
            write!(f, "{}", ty)?;
        }
        write!(f, "|{}>", self.var_ty)
    }
}
impl MuType_ {
pub fn int(len: usize) -> MuType_ {
MuType_::Int(len)
......@@ -204,7 +253,18 @@ impl MuType_ {
MuType_::Struct(tag)
}
pub fn mustruct(tag: MuName, list: Vec<P<MuType>>) -> MuType_ {
/// Replaces the field types of an already-registered struct tag in
/// STRUCT_TAG_MAP. The tag must have been registered beforehand
/// (e.g. via mustruct_empty()); otherwise this panics.
pub fn mustruct_put(tag: MuName, mut list: Vec<P<MuType>>) {
    let mut guard = STRUCT_TAG_MAP.write().unwrap();
    if let Some(entry) = guard.get_mut(&tag) {
        // Reuse the existing Vec allocation: drop the old field types,
        // then move the new ones in.
        entry.tys.clear();
        entry.tys.append(&mut list);
    } else {
        panic!("call mustruct_empty() to create an empty struct before mustruct_put()")
    }
}
pub fn mustruct(tag: StructTag, list: Vec<P<MuType>>) -> MuType_ {
let struct_ty_ = StructType_{tys: list};
// if there is an attempt to use a same tag for different struct,
......@@ -212,9 +272,8 @@ impl MuType_ {
match STRUCT_TAG_MAP.read().unwrap().get(&tag) {
Some(old_struct_ty_) => {
if struct_ty_ != *old_struct_ty_ {
panic!(format!(
"trying to insert {:?} as {}, while the old struct is defined as {:?}",
struct_ty_, tag, old_struct_ty_))
panic!("trying to insert {} as {}, while the old struct is defined as {}",
struct_ty_, tag, old_struct_ty_)
}
},
None => {}
......@@ -227,8 +286,29 @@ impl MuType_ {
pub fn array(ty: P<MuType>, len: usize) -> MuType_ {
MuType_::Array(ty, len)
}
pub fn hybrid(fix_tys: Vec<P<MuType>>, var_ty: P<MuType>) -> MuType_ {
MuType_::Hybrid(fix_tys, var_ty)
/// Registers `tag` in HYBRID_TAG_MAP with a placeholder definition (no fixed
/// fields, void variable part) and returns the corresponding MuType_::Hybrid.
/// The real definition can be filled in later via HybridType_::set_tys().
/// NOTE(review): this unconditionally overwrites any existing entry for `tag`
/// — presumably intentional for forward declarations, but worth confirming.
pub fn hybrid_empty(tag: HybridTag) -> MuType_ {
let hybrid_ty_ = HybridType_{fix_tys: vec![], var_ty: VOID_TYPE.clone()};
HYBRID_TAG_MAP.write().unwrap().insert(tag.clone(), hybrid_ty_);
MuType_::Hybrid(tag)
}
/// Registers a hybrid type definition under `tag` in HYBRID_TAG_MAP and
/// returns the corresponding MuType_::Hybrid.
///
/// Panics if `tag` is already registered with a *different* definition;
/// re-registering an identical definition is allowed.
pub fn hybrid(tag: HybridTag, fix_tys: Vec<P<MuType>>, var_ty: P<MuType>) -> MuType_ {
    let hybrid_ty_ = HybridType_{fix_tys: fix_tys, var_ty: var_ty};

    // Hold the write lock across both the conflict check and the insert.
    // The previous version took a read lock, dropped it, then re-acquired a
    // write lock — letting another thread register a conflicting definition
    // in between (check-then-act race).
    let mut map = HYBRID_TAG_MAP.write().unwrap();
    if let Some(old_hybrid_ty_) = map.get(&tag) {
        if hybrid_ty_ != *old_hybrid_ty_ {
            panic!("trying to insert {} as {}, while the old hybrid is defined as {}",
                hybrid_ty_, tag, old_hybrid_ty_);
        }
    }
    map.insert(tag.clone(), hybrid_ty_);
    MuType_::Hybrid(tag)
}
pub fn void() -> MuType_ {
MuType_::Void
......@@ -300,7 +380,13 @@ pub fn is_traced(ty: &MuType) -> bool {
MuType_::ThreadRef
| MuType_::StackRef
| MuType_::Tagref64 => true,
MuType_::Hybrid(ref fix_tys, ref var_ty) => {
MuType_::Hybrid(ref tag) => {
let map = HYBRID_TAG_MAP.read().unwrap();
let hybrid_ty = map.get(tag).unwrap();
let ref fix_tys = hybrid_ty.fix_tys;
let ref var_ty = hybrid_ty.var_ty;
is_traced(var_ty) ||
fix_tys.into_iter().map(|ty| is_traced(ty))
.fold(false, |ret, this| ret || this)
......@@ -329,7 +415,13 @@ pub fn is_native_safe(ty: &MuType) -> bool {
| MuType_::Vector(ref elem_ty, _) => is_native_safe(elem_ty),
MuType_::UPtr(_) => true,
MuType_::UFuncPtr(_) => true,
MuType_::Hybrid(ref fix_tys, ref var_ty) => {
MuType_::Hybrid(ref tag) => {
let map = HYBRID_TAG_MAP.read().unwrap();
let hybrid_ty = map.get(tag).unwrap();
let ref fix_tys = hybrid_ty.fix_tys;
let ref var_ty = hybrid_ty.var_ty;
is_native_safe(var_ty) &&
fix_tys.into_iter().map(|ty| is_native_safe(&ty))
.fold(true, |ret, this| ret && this)
......
......@@ -757,7 +757,7 @@ impl <'a> InstructionSelection {
_ => panic!("didnt expect order {:?} with store inst", order)
}
let resolved_loc = self.emit_node_addr_to_value(loc_op, vm);
let resolved_loc = self.emit_node_addr_to_value(loc_op, f_content, f_context, vm);
let res_temp = self.get_result_value(node);
if self.match_ireg(node) {
......@@ -784,19 +784,19 @@ impl <'a> InstructionSelection {
}
};
let resolved_loc = self.emit_node_addr_to_value(loc_op, vm);
if self.match_ireg(val_op) {
let val = self.emit_ireg(val_op, f_content, f_context, vm);
let resolved_loc = self.emit_node_addr_to_value(loc_op, f_content, f_context, vm);
if self.match_iimm(val_op) {
let val = self.node_iimm_to_i32(val_op);
if generate_plain_mov {
self.backend.emit_mov_mem64_r64(&resolved_loc, &val);
self.backend.emit_mov_mem64_imm32(&resolved_loc, val);
} else {
unimplemented!()
}
} else if self.match_iimm(val_op) {
let val = self.node_iimm_to_i32(val_op);
} else if self.match_ireg(val_op) {
let val = self.emit_ireg(val_op, f_content, f_context, vm);
if generate_plain_mov {
self.backend.emit_mov_mem64_imm32(&resolved_loc, val);
self.backend.emit_mov_mem64_r64(&resolved_loc, &val);
} else {
unimplemented!()
}
......@@ -1876,7 +1876,7 @@ impl <'a> InstructionSelection {
}
}
fn emit_node_addr_to_value(&mut self, op: &P<TreeNode>, vm: &VM) -> P<Value> {
fn emit_node_addr_to_value(&mut self, op: &P<TreeNode>, f_content: &FunctionContent, f_context: &mut FunctionContext, vm: &VM) -> P<Value> {
match op.v {
TreeNode_::Value(ref pv) => {
match pv.v {
......@@ -1910,11 +1910,13 @@ impl <'a> InstructionSelection {
Value_::Constant(_) => unimplemented!()
}
}
TreeNode_::Instruction(_) => self.emit_get_mem_from_inst(op, vm)
TreeNode_::Instruction(_) => self.emit_get_mem_from_inst(op, f_content, f_context, vm)
}
}
fn emit_get_mem_from_inst(&mut self, op: &P<TreeNode>, vm: &VM) -> P<Value> {
fn emit_get_mem_from_inst(&mut self, op: &P<TreeNode>, f_content: &FunctionContent, f_context: &mut FunctionContext, vm: &VM) -> P<Value> {
let header_size = mm::objectmodel::OBJECT_HEADER_SIZE as i32;
match op.v {
TreeNode_::Instruction(ref inst) => {
let ref ops = inst.ops.read().unwrap();
......@@ -1922,8 +1924,44 @@ impl <'a> InstructionSelection {
match inst.v {
Instruction_::GetIRef(op_index) => {
let ref op = ops[op_index];
self.make_memory_op_base_offset(&op.clone_value(), mm::objectmodel::OBJECT_HEADER_SIZE as i32, ADDRESS_TYPE.clone(), vm)
self.make_memory_op_base_offset(&op.clone_value(), header_size, ADDRESS_TYPE.clone(), vm)
}
Instruction_::GetFieldIRef{is_ptr, base, index} => {
let ref base = ops[base];
let struct_ty = {
let ref iref_or_uptr_ty = base.clone_value().ty;
match iref_or_uptr_ty.v {
MuType_::IRef(ref ty)
| MuType_::UPtr(ref ty) => ty.clone(),
_ => panic!("expected the base for GetFieldIRef has a type of iref or uptr, found type: {}", iref_or_uptr_ty)
}
};
let ty_info = vm.get_backend_type_info(struct_ty.id());
let layout = match ty_info.struct_layout.as_ref() {
Some(layout) => layout,
None => panic!("a struct type does not have a layout yet: {:?}", ty_info)
};
debug_assert!(layout.len() > index);
let field_offset : i32 = layout[index] as i32;
match base.v {
TreeNode_::Instruction(Instruction{v: Instruction_::GetIRef(op_index), ref ops, ..}) => {
let ops_guard = ops.read().unwrap();
let ref inner = ops_guard[op_index];
self.make_memory_op_base_offset(&inner.clone_value(), header_size + field_offset, ADDRESS_TYPE.clone(), vm)
},
_ => {
let tmp = self.emit_ireg(base, f_content, f_context, vm);
self.make_memory_op_base_offset(&tmp, field_offset, ADDRESS_TYPE.clone(), vm)
}
}
}
_ => unimplemented!()
}
......
......@@ -99,7 +99,13 @@ pub fn resolve_backend_type_info (ty: &MuType, vm: &VM) -> BackendTypeInfo {
// - align is the most strict aligned element (from all fix tys and var ty)
// - size is fixed tys size
// - layout is fixed tys layout
MuType_::Hybrid(ref fix_tys, ref var_ty) => {
MuType_::Hybrid(ref name) => {
let read_lock = HYBRID_TAG_MAP.read().unwrap();
let hybrid = read_lock.get(name).unwrap();
let fix_tys = hybrid.get_fix_tys();
let var_ty = hybrid.get_var_ty();
// treat fix_tys as struct
let mut ret = layout_struct(fix_tys, vm);
......
......@@ -880,10 +880,11 @@ impl<'lb, 'lvm> BundleLoader<'lb, 'lvm> {
self.struct_id_tags.push((id, tag.clone()));
MuType_::Struct(tag)
},
NodeType::TypeHybrid { id: _, ref fixedtys, varty } => {
let impl_fixedtys = fixedtys.iter().map(|t| self.ensure_type_rec(*t)).collect::<Vec<_>>();
let impl_varty = self.ensure_type_rec(varty);
MuType_::Hybrid(impl_fixedtys, impl_varty)
NodeType::TypeHybrid { id: _, ref fixedtys, varty } => {
unimplemented!()
// let impl_fixedtys = fixedtys.iter().map(|t| self.ensure_type_rec(*t)).collect::<Vec<_>>();
// let impl_varty = self.ensure_type_rec(varty);
// MuType_::Hybrid(impl_fixedtys, impl_varty)
},
NodeType::TypeArray { id: _, elemty, len } => {
let impl_elemty = self.ensure_type_rec(elemty);
......@@ -1529,27 +1530,28 @@ impl<'lb, 'lvm> BundleLoader<'lb, 'lvm> {
}
},
NodeInst::NodeGetFieldIRef { id: _, result_id, is_ptr, refty, index, opnd } => {
let impl_opnd = self.get_treenode(fcb, opnd);
let impl_index = self.ensure_constint_of(index as u64);
let refty_node = self.b.bundle.types.get(&refty).unwrap();
let field_ty_id = match **refty_node {
NodeType::TypeStruct { id: _, ref fieldtys } => {
fieldtys[index as usize]
},
ref t => panic!("GETFIELDIREF {}: Expected struct type. actual: {:?}", id, t)
};
let impl_rvtype = self.ensure_iref_or_uptr(field_ty_id, is_ptr);
let impl_rv = self.new_ssa(fcb, result_id, impl_rvtype).clone_value();
Instruction {
hdr: hdr,
value: Some(vec![impl_rv]),
ops: RwLock::new(vec![impl_opnd, impl_index]),
v: Instruction_::GetFieldIRef {
is_ptr: is_ptr,
base: 0,
index: 1,
},
}
unimplemented!()
// let impl_opnd = self.get_treenode(fcb, opnd);
// let impl_index = self.ensure_constint_of(index as u64);
// let refty_node = self.b.bundle.types.get(&refty).unwrap();
// let field_ty_id = match **refty_node {
// NodeType::TypeStruct { id: _, ref fieldtys } => {
// fieldtys[index as usize]
// },
// ref t => panic!("GETFIELDIREF {}: Expected struct type. actual: {:?}", id, t)
// };
// let impl_rvtype = self.ensure_iref_or_uptr(field_ty_id, is_ptr);
// let impl_rv = self.new_ssa(fcb, result_id, impl_rvtype).clone_value();
// Instruction {
// hdr: hdr,
// value: Some(vec![impl_rv]),
// ops: RwLock::new(vec![impl_opnd, impl_index]),
// v: Instruction_::GetFieldIRef {
// is_ptr: is_ptr,
// base: 0,
// index: 1,
// },
// }
},
NodeInst::NodeGetElemIRef { id: _, result_id, is_ptr, refty, indty: _, opnd, index } => {
let impl_opnd = self.get_treenode(fcb, opnd);
......
This diff is collapsed.
......@@ -10,4 +10,5 @@ mod test_floatingpoint;
mod test_int;
mod test_binop;
mod test_controlflow;
mod test_call;
\ No newline at end of file
mod test_call;
mod test_mem_inst;
\ No newline at end of file
use mu::ast::types::*;
use mu::ast::ir::*;
use mu::ast::ptr::*;
use mu::ast::inst::*;
use mu::ast::op::*;
use mu::vm::*;
......@@ -41,12 +42,10 @@ fn test_ccall_exit() {
assert!(ret_code == 10);
}
fn ccall_exit() -> VM {
let vm = VM::new();
pub fn gen_ccall_exit(arg: P<TreeNode>, func_ver: &mut MuFunctionVersion, vm: &VM) -> Box<TreeNode> {
// .typedef @int32 = int<32>
let type_def_int32 = vm.declare_type(vm.next_id(), MuType_::int(32));
vm.set_name(type_def_int32.as_entity(), Mu("int32"));
vm.set_name(type_def_int32.as_entity(), Mu("exit_int32"));
// .typedef @exit_sig = (@int32) -> !
let exit_sig = vm.declare_func_sig(vm.next_id(), vec![], vec![type_def_int32.clone()]);
......@@ -60,6 +59,33 @@ fn ccall_exit() -> VM {
let const_exit = vm.declare_const(vm.next_id(), type_def_ufp_exit.clone(), Constant::ExternSym(C("exit")));
vm.set_name(const_exit.as_entity(), Mu("exit"));
// exprCCALL %const_exit (%const_int32_10) normal: %end(), exception: %end()
let blk_end_id = vm.next_id();
let const_exit_local = func_ver.new_constant(const_exit.clone());
func_ver.new_inst(Instruction{
hdr: MuEntityHeader::unnamed(vm.next_id()),
value: None,
ops: RwLock::new(vec![const_exit_local, arg]),
v: Instruction_::ExprCCall {
data: CallData {
func: 0,
args: vec![1],
convention: CallConvention::Foreign(ForeignFFI::C)
},
is_abort: false
}
})
}
fn ccall_exit() -> VM {
let vm = VM::new();
// .typedef @int32 = int<32>
let type_def_int32 = vm.declare_type(vm.next_id(), MuType_::int(32));
vm.set_name(type_def_int32.as_entity(), Mu("int32"));
// .const @int32_10 = 10
let const_int32_10 = vm.declare_const(vm.next_id(), type_def_int32.clone(), Constant::Int(10));
vm.set_name(const_int32_10.as_entity(), Mu("const_int32_10"));
......@@ -88,23 +114,9 @@ fn ccall_exit() -> VM {
// exprCCALL %const_exit (%const_int32_10) normal: %end(), exception: %end()
let blk_end_id = vm.next_id();
let const_exit_local = func_ver.new_constant(const_exit.clone());
let const_int32_10_local = func_ver.new_constant(const_int32_10.clone());
let blk_entry_ccall = func_ver.new_inst(Instruction{
hdr: MuEntityHeader::unnamed(vm.next_id()),
value: None,
ops: RwLock::new(vec![const_exit_local, const_int32_10_local]),
v: Instruction_::ExprCCall {
data: CallData {
func: 0,
args: vec![1],
convention: CallConvention::Foreign(ForeignFFI::C)
},
is_abort: false
}
});
let blk_entry_ccall = gen_ccall_exit(const_int32_10_local.clone(), &mut func_ver, &vm);
// RET %const_int32_0
let const_int32_0_local = func_ver.new_constant(const_int32_0.clone());
......
This diff is collapsed.
......@@ -47,7 +47,7 @@ fn create_types() -> Vec<P<MuType>> {
let t8 = MuType::new(8, MuType_::array(types[0].clone(), 5));
types.push(P(t8));
let t9 = MuType::new(9, MuType_::hybrid(vec![types[7].clone(), types[1].clone()], types[0].clone()));
let t9 = MuType::new(9, MuType_::hybrid("MyHybridTag1".to_string(), vec![types[7].clone(), types[1].clone()], types[0].clone()));
types.push(P(t9));
let t10 = MuType::new(10, MuType_::void());
......@@ -95,7 +95,7 @@ fn test_type_constructors() {
assert_type!(t7_struct_ty, "struct<int<8> float>");
}
assert_type!(*types[8], "array<int<8> 5>");
assert_type!(*types[9], "hybrid<[MyStructTag1(struct), float] int<8>>");
assert_type!(*types[9], "MyHybridTag1(hybrid)");
assert_type!(*types[10], "void");
assert_type!(*types[11], "threadref");
assert_type!(*types[12], "stackref");
......@@ -142,9 +142,9 @@ fn test_is_traced() {
let ref_array = MuType::new(102, MuType_::array(types[3].clone(), 5));
assert_eq!(is_traced(&ref_array), true);
assert_eq!(is_traced(&types[9]), false);
let fix_ref_hybrid = MuType::new(103, MuType_::hybrid(vec![types[3].clone(), types[0].clone()], types[0].clone()));
let fix_ref_hybrid = MuType::new(103, MuType_::hybrid("FixRefHybrid".to_string(), vec![types[3].clone(), types[0].clone()], types[0].clone()));
assert_eq!(is_traced(&fix_ref_hybrid), true);
let var_ref_hybrid = MuType::new(104, MuType_::hybrid(vec![types[0].clone(), types[1].clone()], types[3].clone()));
let var_ref_hybrid = MuType::new(104, MuType_::hybrid("VarRefHybrid".to_string(), vec![types[0].clone(), types[1].clone()], types[3].clone()));
assert_eq!(is_traced(&var_ref_hybrid), true);
assert_eq!(is_traced(&types[10]), false);
assert_eq!(is_traced(&types[11]), true);
......@@ -175,9 +175,9 @@ fn test_is_native_safe() {
let ref_array = MuType::new(102, MuType_::array(types[3].clone(), 5));
assert_eq!(is_native_safe(&ref_array), false);
assert_eq!(is_native_safe(&types[9]), true);
let fix_ref_hybrid = MuType::new(103, MuType_::hybrid(vec![types[3].clone(), types[0].clone()], types[0].clone()));
let fix_ref_hybrid = MuType::new(103, MuType_::hybrid("FixRefHybrid".to_string(), vec![types[3].clone(), types[0].clone()], types[0].clone()));
assert_eq!(is_native_safe(&fix_ref_hybrid), false);
let var_ref_hybrid = MuType::new(104, MuType_::hybrid(vec![types[0].clone(), types[1].clone()], types[3].clone()));
let var_ref_hybrid = MuType::new(104, MuType_::hybrid("VarRefHybrid".to_string(), vec![types[0].clone(), types[1].clone()], types[3].clone()));
assert_eq!(is_native_safe(&var_ref_hybrid), false);
assert_eq!(is_native_safe(&types[10]), true);
assert_eq!(is_native_safe(&types[11]), false);
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment