Commit 8da4a884 authored by qinsoon

refactor hybrid. implemented GetFieldIRef

Note: made two places in the api implementation unimplemented, as the
instruction/type changed.
parent 9938b350
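
At a glance, the change replaces the inline hybrid representation with a named tag and turns the GetFieldIRef index into a plain constant. Summarised from the definitions in the diff below:

    // before: element types stored inline; the field index is an extra operand
    //   Hybrid (Vec<P<MuType>>, P<MuType>)
    //   GetFieldIRef { is_ptr: bool, base: OpIndex, index: OpIndex }
    // after: hybrids are identified by a tag kept in a global HYBRID_TAG_MAP,
    //   and the field index travels with the instruction as a usize
    //   Hybrid (HybridTag)                                   // HybridTag = MuName
    //   GetFieldIRef { is_ptr: bool, base: OpIndex, index: usize }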
@@ -176,7 +176,7 @@ pub enum Instruction_ {
     GetFieldIRef{
         is_ptr: bool,
         base: OpIndex, // iref or uptr
-        index: OpIndex // constant
+        index: usize   // constant
     },
     // iref|uptr<array<T N>> int<M> -> iref|uptr<T>
@@ -306,7 +306,7 @@ impl Instruction_ {
     &Instruction_::GetIRef(reference) => format!("GETIREF {}", ops[reference]),
     &Instruction_::GetFieldIRef{is_ptr, base, index} => {
         let ptr = select_value!(is_ptr, "PTR", "");
-        format!("GETFIELDIREF {} {} {}", ptr, ops[base], ops[index])
+        format!("GETFIELDIREF {} {} {}", ptr, ops[base], index)
     },
     &Instruction_::GetElementIRef{is_ptr, base, index} => {
         let ptr = select_value!(is_ptr, "PTR", "");
......
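
A small usage sketch of the new instruction shape (the operand wiring here is hypothetical, not taken from the commit): the field index is now baked into the instruction, so the printer above formats index directly instead of going through ops.

    // assume ops[0] holds the iref/uptr base of the access
    let getfield = Instruction_::GetFieldIRef {
        is_ptr: false,
        base: 0,    // OpIndex into the instruction's operand list
        index: 1    // field #1 as a plain usize constant, no longer an operand
    };
    // printed via the Display code above roughly as: GETFIELDIREF <base> 1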
@@ -79,7 +79,7 @@ pub fn pick_op_code_for_ssa(ty: &P<MuType>) -> OpCode {
     // we are not supposed to have these as SSA
     Struct(_)
     | Array(_, _)
-    | Hybrid(_, _)
+    | Hybrid(_)
     | Void => panic!("Not expecting {} as SSA", ty),
     // unimplemented
     Vector(_, _) => unimplemented!()
@@ -107,7 +107,7 @@ pub fn pick_op_code_for_value(ty: &P<MuType>) -> OpCode {
     // we are not supposed to have these as SSA
     Struct(_)
     | Array(_, _)
-    | Hybrid(_, _)
+    | Hybrid(_)
     | Void => unimplemented!(),
     // unimplemented
     Vector(_, _) => unimplemented!()
......
@@ -32,6 +32,10 @@ lazy_static! {
     pub static ref DOUBLE_TYPE : P<MuType> = P(
         MuType::new(new_internal_id(), MuType_::double())
     );
+    pub static ref VOID_TYPE : P<MuType> = P(
+        MuType::new(new_internal_id(), MuType_::void())
+    );
     pub static ref INTERNAL_TYPES : Vec<P<MuType>> = vec![
         ADDRESS_TYPE.clone(),
@@ -58,6 +62,8 @@ impl MuType {
     }
 }
+pub type StructTag = MuName;
+pub type HybridTag = MuName;
 #[derive(PartialEq, Debug, RustcEncodable, RustcDecodable)]
 pub enum MuType_ {
     /// int <length>
@@ -78,13 +84,13 @@ pub enum MuType_ {
     UPtr (P<MuType>),
     /// struct<T1 T2 ...>
-    Struct (MuName),
+    Struct (StructTag),
     /// array<T length>
     Array (P<MuType>, usize),
     /// hybrid<F1 F2 ... V>: a hybrid of fixed length parts and a variable length part
-    Hybrid (Vec<P<MuType>>, P<MuType>),
+    Hybrid (HybridTag),
     /// void
     Void,
@@ -124,7 +130,6 @@ impl fmt::Display for MuType_ {
     &MuType_::WeakRef(ref ty) => write!(f, "weakref<{}>", ty),
     &MuType_::UPtr(ref ty) => write!(f, "uptr<{}>", ty),
     &MuType_::Array(ref ty, size) => write!(f, "array<{} {}>", ty, size),
-    &MuType_::Hybrid(ref fix_tys, ref var_ty) => write!(f, "hybrid<[{}] {}>", vec_utils::as_str(fix_tys), var_ty),
     &MuType_::Void => write!(f, "void"),
     &MuType_::ThreadRef => write!(f, "threadref"),
     &MuType_::StackRef => write!(f, "stackref"),
@@ -132,14 +137,17 @@ impl fmt::Display for MuType_ {
     &MuType_::Vector(ref ty, size) => write!(f, "vector<{} {}>", ty, size),
     &MuType_::FuncRef(ref sig) => write!(f, "funcref<{}>", sig),
     &MuType_::UFuncPtr(ref sig) => write!(f, "ufuncref<{}>", sig),
-    &MuType_::Struct(ref tag) => write!(f, "{}(struct)", tag)
+    &MuType_::Struct(ref tag) => write!(f, "{}(struct)", tag),
+    &MuType_::Hybrid(ref tag) => write!(f, "{}(hybrid)", tag)
     }
 }
 }
 lazy_static! {
     /// storing a map from MuName to StructType_
-    pub static ref STRUCT_TAG_MAP : RwLock<HashMap<MuName, StructType_>> = RwLock::new(HashMap::new());
+    pub static ref STRUCT_TAG_MAP : RwLock<HashMap<StructTag, StructType_>> = RwLock::new(HashMap::new());
+    /// storing a map from MuName to HybridType_
+    pub static ref HYBRID_TAG_MAP : RwLock<HashMap<HybridTag, HybridType_>> = RwLock::new(HashMap::new());
 }
 #[derive(PartialEq, Debug, RustcEncodable, RustcDecodable)]
@@ -176,6 +184,47 @@ impl StructType_ {
     }
 }
+#[derive(PartialEq, Debug, RustcEncodable, RustcDecodable)]
+pub struct HybridType_ {
+    fix_tys: Vec<P<MuType>>,
+    var_ty : P<MuType>
+}
+impl HybridType_ {
+    pub fn new(fix_tys: Vec<P<MuType>>, var_ty: P<MuType>) -> HybridType_ {
+        HybridType_ {fix_tys: fix_tys, var_ty: var_ty}
+    }
+    pub fn set_tys(&mut self, mut fix_tys: Vec<P<MuType>>, var_ty: P<MuType>) {
+        self.fix_tys.clear();
+        self.fix_tys.append(&mut fix_tys);
+        self.var_ty = var_ty;
+    }
+    pub fn get_fix_tys(&self) -> &Vec<P<MuType>> {
+        &self.fix_tys
+    }
+    pub fn get_var_ty(&self) -> &P<MuType> {
+        &self.var_ty
+    }
+}
+impl fmt::Display for HybridType_ {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        write!(f, "hybrid<").unwrap();
+        for i in 0..self.fix_tys.len() {
+            let ty = &self.fix_tys[i];
+            write!(f, "{}", ty).unwrap();
+            if i != self.fix_tys.len() - 1 {
+                write!(f, " ").unwrap();
+            }
+        }
+        write!(f, "|{}>", self.var_ty)
+    }
+}
 impl MuType_ {
     pub fn int(len: usize) -> MuType_ {
         MuType_::Int(len)
@@ -204,7 +253,18 @@ impl MuType_ {
         MuType_::Struct(tag)
     }
-    pub fn mustruct(tag: MuName, list: Vec<P<MuType>>) -> MuType_ {
+    pub fn mustruct_put(tag: MuName, mut list: Vec<P<MuType>>) {
+        let mut map_guard = STRUCT_TAG_MAP.write().unwrap();
+        match map_guard.get_mut(&tag) {
+            Some(struct_ty_) => {
+                struct_ty_.tys.clear();
+                struct_ty_.tys.append(&mut list);
+            },
+            None => panic!("call mustruct_empty() to create an empty struct before mustruct_put()")
+        }
+    }
+    pub fn mustruct(tag: StructTag, list: Vec<P<MuType>>) -> MuType_ {
         let struct_ty_ = StructType_{tys: list};
         // if there is an attempt to use a same tag for different struct,
@@ -212,9 +272,8 @@ impl MuType_ {
         match STRUCT_TAG_MAP.read().unwrap().get(&tag) {
             Some(old_struct_ty_) => {
                 if struct_ty_ != *old_struct_ty_ {
-                    panic!(format!(
-                        "trying to insert {:?} as {}, while the old struct is defined as {:?}",
-                        struct_ty_, tag, old_struct_ty_))
+                    panic!("trying to insert {} as {}, while the old struct is defined as {}",
+                        struct_ty_, tag, old_struct_ty_)
                 }
             },
             None => {}
@@ -227,8 +286,29 @@ impl MuType_ {
     pub fn array(ty: P<MuType>, len: usize) -> MuType_ {
         MuType_::Array(ty, len)
     }
-    pub fn hybrid(fix_tys: Vec<P<MuType>>, var_ty: P<MuType>) -> MuType_ {
-        MuType_::Hybrid(fix_tys, var_ty)
+    pub fn hybrid_empty(tag: HybridTag) -> MuType_ {
+        let hybrid_ty_ = HybridType_{fix_tys: vec![], var_ty: VOID_TYPE.clone()};
+        HYBRID_TAG_MAP.write().unwrap().insert(tag.clone(), hybrid_ty_);
+        MuType_::Hybrid(tag)
+    }
+    pub fn hybrid(tag: HybridTag, fix_tys: Vec<P<MuType>>, var_ty: P<MuType>) -> MuType_ {
+        let hybrid_ty_ = HybridType_{fix_tys: fix_tys, var_ty: var_ty};
+        match HYBRID_TAG_MAP.read().unwrap().get(&tag) {
+            Some(old_hybrid_ty_) => {
+                if hybrid_ty_ != *old_hybrid_ty_ {
+                    panic!("trying to insert {} as {}, while the old hybrid is defined as {}",
+                        hybrid_ty_, tag, old_hybrid_ty_);
+                }
+            },
+            None => {}
+        }
+        HYBRID_TAG_MAP.write().unwrap().insert(tag.clone(), hybrid_ty_);
+        MuType_::Hybrid(tag)
     }
     pub fn void() -> MuType_ {
         MuType_::Void
@@ -300,7 +380,13 @@ pub fn is_traced(ty: &MuType) -> bool {
         MuType_::ThreadRef
         | MuType_::StackRef
         | MuType_::Tagref64 => true,
-        MuType_::Hybrid(ref fix_tys, ref var_ty) => {
+        MuType_::Hybrid(ref tag) => {
+            let map = HYBRID_TAG_MAP.read().unwrap();
+            let hybrid_ty = map.get(tag).unwrap();
+            let ref fix_tys = hybrid_ty.fix_tys;
+            let ref var_ty = hybrid_ty.var_ty;
             is_traced(var_ty) ||
             fix_tys.into_iter().map(|ty| is_traced(ty))
                 .fold(false, |ret, this| ret || this)
@@ -329,7 +415,13 @@ pub fn is_native_safe(ty: &MuType) -> bool {
         | MuType_::Vector(ref elem_ty, _) => is_native_safe(elem_ty),
         MuType_::UPtr(_) => true,
         MuType_::UFuncPtr(_) => true,
-        MuType_::Hybrid(ref fix_tys, ref var_ty) => {
+        MuType_::Hybrid(ref tag) => {
+            let map = HYBRID_TAG_MAP.read().unwrap();
+            let hybrid_ty = map.get(tag).unwrap();
+            let ref fix_tys = hybrid_ty.fix_tys;
+            let ref var_ty = hybrid_ty.var_ty;
             is_native_safe(var_ty) &&
             fix_tys.into_iter().map(|ty| is_native_safe(&ty))
                 .fold(true, |ret, this| ret && this)
......
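
A rough sketch of how the tag-based API above fits together (the tag string and element choices are invented for illustration, and MuName is assumed to be a plain String here):

    // define hybrid<[double double] double> under a fresh tag
    let ty = MuType::new(new_internal_id(),
        MuType_::hybrid("my_hybrid".to_string(),
                        vec![DOUBLE_TYPE.clone(), DOUBLE_TYPE.clone()],
                        DOUBLE_TYPE.clone()));
    // the MuType_ itself only carries the tag; the element types live in HYBRID_TAG_MAP
    {
        let map = HYBRID_TAG_MAP.read().unwrap();
        let hybrid_ty_ = map.get(&"my_hybrid".to_string()).unwrap();
        assert_eq!(hybrid_ty_.get_fix_tys().len(), 2);
    }
    // for self-referential definitions, register an empty hybrid first, then fill it in
    let rec_ty = MuType::new(new_internal_id(), MuType_::hybrid_empty("rec_hybrid".to_string()));
    HYBRID_TAG_MAP.write().unwrap()
        .get_mut(&"rec_hybrid".to_string()).unwrap()
        .set_tys(vec![ADDRESS_TYPE.clone()], DOUBLE_TYPE.clone());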
@@ -757,7 +757,7 @@ impl <'a> InstructionSelection {
             _ => panic!("didnt expect order {:?} with store inst", order)
         }
-        let resolved_loc = self.emit_node_addr_to_value(loc_op, vm);
+        let resolved_loc = self.emit_node_addr_to_value(loc_op, f_content, f_context, vm);
         let res_temp = self.get_result_value(node);
         if self.match_ireg(node) {
@@ -784,19 +784,19 @@ impl <'a> InstructionSelection {
             }
         };
-        let resolved_loc = self.emit_node_addr_to_value(loc_op, vm);
+        let resolved_loc = self.emit_node_addr_to_value(loc_op, f_content, f_context, vm);
-        if self.match_ireg(val_op) {
-            let val = self.emit_ireg(val_op, f_content, f_context, vm);
+        if self.match_iimm(val_op) {
+            let val = self.node_iimm_to_i32(val_op);
             if generate_plain_mov {
-                self.backend.emit_mov_mem64_r64(&resolved_loc, &val);
+                self.backend.emit_mov_mem64_imm32(&resolved_loc, val);
             } else {
                 unimplemented!()
             }
-        } else if self.match_iimm(val_op) {
-            let val = self.node_iimm_to_i32(val_op);
+        } else if self.match_ireg(val_op) {
+            let val = self.emit_ireg(val_op, f_content, f_context, vm);
             if generate_plain_mov {
-                self.backend.emit_mov_mem64_imm32(&resolved_loc, val);
+                self.backend.emit_mov_mem64_r64(&resolved_loc, &val);
             } else {
                 unimplemented!()
             }
@@ -1876,7 +1876,7 @@ impl <'a> InstructionSelection {
         }
     }
-    fn emit_node_addr_to_value(&mut self, op: &P<TreeNode>, vm: &VM) -> P<Value> {
+    fn emit_node_addr_to_value(&mut self, op: &P<TreeNode>, f_content: &FunctionContent, f_context: &mut FunctionContext, vm: &VM) -> P<Value> {
         match op.v {
             TreeNode_::Value(ref pv) => {
                 match pv.v {
@@ -1910,11 +1910,13 @@ impl <'a> InstructionSelection {
                     Value_::Constant(_) => unimplemented!()
                 }
             }
-            TreeNode_::Instruction(_) => self.emit_get_mem_from_inst(op, vm)
+            TreeNode_::Instruction(_) => self.emit_get_mem_from_inst(op, f_content, f_context, vm)
         }
     }
-    fn emit_get_mem_from_inst(&mut self, op: &P<TreeNode>, vm: &VM) -> P<Value> {
+    fn emit_get_mem_from_inst(&mut self, op: &P<TreeNode>, f_content: &FunctionContent, f_context: &mut FunctionContext, vm: &VM) -> P<Value> {
+        let header_size = mm::objectmodel::OBJECT_HEADER_SIZE as i32;
         match op.v {
             TreeNode_::Instruction(ref inst) => {
                 let ref ops = inst.ops.read().unwrap();
@@ -1922,8 +1924,44 @@ impl <'a> InstructionSelection {
                 match inst.v {
                     Instruction_::GetIRef(op_index) => {
                         let ref op = ops[op_index];
-                        self.make_memory_op_base_offset(&op.clone_value(), mm::objectmodel::OBJECT_HEADER_SIZE as i32, ADDRESS_TYPE.clone(), vm)
+                        self.make_memory_op_base_offset(&op.clone_value(), header_size, ADDRESS_TYPE.clone(), vm)
+                    }
+                    Instruction_::GetFieldIRef{is_ptr, base, index} => {
+                        let ref base = ops[base];
+                        let struct_ty = {
+                            let ref iref_or_uptr_ty = base.clone_value().ty;
+                            match iref_or_uptr_ty.v {
+                                MuType_::IRef(ref ty)
+                                | MuType_::UPtr(ref ty) => ty.clone(),
+                                _ => panic!("expected the base for GetFieldIRef has a type of iref or uptr, found type: {}", iref_or_uptr_ty)
+                            }
+                        };
+                        let ty_info = vm.get_backend_type_info(struct_ty.id());
+                        let layout = match ty_info.struct_layout.as_ref() {
+                            Some(layout) => layout,
+                            None => panic!("a struct type does not have a layout yet: {:?}", ty_info)
+                        };
+                        debug_assert!(layout.len() > index);
+                        let field_offset : i32 = layout[index] as i32;
+                        match base.v {
+                            TreeNode_::Instruction(Instruction{v: Instruction_::GetIRef(op_index), ref ops, ..}) => {
+                                let ops_guard = ops.read().unwrap();
+                                let ref inner = ops_guard[op_index];
+                                self.make_memory_op_base_offset(&inner.clone_value(), header_size + field_offset, ADDRESS_TYPE.clone(), vm)
+                            },
+                            _ => {
+                                let tmp = self.emit_ireg(base, f_content, f_context, vm);
+                                self.make_memory_op_base_offset(&tmp, field_offset, ADDRESS_TYPE.clone(), vm)
+                            }
+                        }
                     }
                     _ => unimplemented!()
                 }
......
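
A worked example of the addressing logic in the new GetFieldIRef arm (the numbers are illustrative; real offsets come from the backend struct layout and OBJECT_HEADER_SIZE):

    // suppose the struct layout is [0, 8, 16] and the object header is 16 bytes
    // GETFIELDIREF on field #2, applied directly to a GETIREF result, folds into
    // a single memory operand:
    //     [ref_base + 16 + 16]      // header_size + layout[2]
    // for any other base, the base is first evaluated into a register and only
    // the field offset is added:
    //     [tmp_base + 16]           // layout[2]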
@@ -99,7 +99,13 @@ pub fn resolve_backend_type_info (ty: &MuType, vm: &VM) -> BackendTypeInfo {
     // - align is the most strict aligned element (from all fix tys and var ty)
     // - size is fixed tys size
     // - layout is fixed tys layout
-    MuType_::Hybrid(ref fix_tys, ref var_ty) => {
+    MuType_::Hybrid(ref name) => {
+        let read_lock = HYBRID_TAG_MAP.read().unwrap();
+        let hybrid = read_lock.get(name).unwrap();
+        let fix_tys = hybrid.get_fix_tys();
+        let var_ty = hybrid.get_var_ty();
         // treat fix_tys as struct
         let mut ret = layout_struct(fix_tys, vm);
......
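
To make the layout comment above concrete, a hypothetical case (field sizes assume a 64-bit target; padding follows whatever layout_struct already does for plain structs):

    // hybrid<[int<64> int<8>] double>
    //   layout : field offsets of the fixed part only, e.g. [0, 8]
    //   size   : size of the fixed part, e.g. 16 after padding
    //   align  : strictest alignment among int<64>, int<8> and the var type double, i.e. 8
    // the variable-length tail influences alignment but is not counted in the size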
@@ -880,10 +880,11 @@ impl<'lb, 'lvm> BundleLoader<'lb, 'lvm> {
             self.struct_id_tags.push((id, tag.clone()));
             MuType_::Struct(tag)
         },
         NodeType::TypeHybrid { id: _, ref fixedtys, varty } => {
-            let impl_fixedtys = fixedtys.iter().map(|t| self.ensure_type_rec(*t)).collect::<Vec<_>>();
-            let impl_varty = self.ensure_type_rec(varty);
-            MuType_::Hybrid(impl_fixedtys, impl_varty)
+            unimplemented!()
+            // let impl_fixedtys = fixedtys.iter().map(|t| self.ensure_type_rec(*t)).collect::<Vec<_>>();
+            // let impl_varty = self.ensure_type_rec(varty);
+            // MuType_::Hybrid(impl_fixedtys, impl_varty)
         },
         NodeType::TypeArray { id: _, elemty, len } => {
             let impl_elemty = self.ensure_type_rec(elemty);
@@ -1529,27 +1530,28 @@ impl<'lb, 'lvm> BundleLoader<'lb, 'lvm> {
             }
         },
         NodeInst::NodeGetFieldIRef { id: _, result_id, is_ptr, refty, index, opnd } => {
-            let impl_opnd = self.get_treenode(fcb, opnd);
-            let impl_index = self.ensure_constint_of(index as u64);
-            let refty_node = self.b.bundle.types.get(&refty).unwrap();
-            let field_ty_id = match **refty_node {
-                NodeType::TypeStruct { id: _, ref fieldtys } => {
-                    fieldtys[index as usize]
-                },
-                ref t => panic!("GETFIELDIREF {}: Expected struct type. actual: {:?}", id, t)
-            };
-            let impl_rvtype = self.ensure_iref_or_uptr(field_ty_id, is_ptr);
-            let impl_rv = self.new_ssa(fcb, result_id, impl_rvtype).clone_value();
-            Instruction {
-                hdr: hdr,
-                value: Some(vec![impl_rv]),
-                ops: RwLock::new(vec![impl_opnd, impl_index]),
-                v: Instruction_::GetFieldIRef {
-                    is_ptr: is_ptr,
-                    base: 0,
-                    index: 1,
-                },
-            }
+            unimplemented!()
+            // let impl_opnd = self.get_treenode(fcb, opnd);
+            // let impl_index = self.ensure_constint_of(index as u64);
+            // let refty_node = self.b.bundle.types.get(&refty).unwrap();
+            // let field_ty_id = match **refty_node {
+            //     NodeType::TypeStruct { id: _, ref fieldtys } => {
+            //         fieldtys[index as usize]
+            //     },
+            //     ref t => panic!("GETFIELDIREF {}: Expected struct type. actual: {:?}", id, t)
+            // };
+            // let impl_rvtype = self.ensure_iref_or_uptr(field_ty_id, is_ptr);
+            // let impl_rv = self.new_ssa(fcb, result_id, impl_rvtype).clone_value();
+            // Instruction {
+            //     hdr: hdr,
+            //     value: Some(vec![impl_rv]),
+            //     ops: RwLock::new(vec![impl_opnd, impl_index]),
+            //     v: Instruction_::GetFieldIRef {
+            //         is_ptr: is_ptr,
+            //         base: 0,
+            //         index: 1,
+            //     },
+            // }
         },
         NodeInst::NodeGetElemIRef { id: _, result_id, is_ptr, refty, indty: _, opnd, index } => {
             let impl_opnd = self.get_treenode(fcb, opnd);
......
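
The NodeGetFieldIRef arm above is stubbed out because the field index is no longer an operand. One possible shape for reinstating it against the new definition, kept here only as a sketch (helper calls are copied from the commented-out code; the final wiring is an assumption, not part of this commit):

    // let impl_opnd = self.get_treenode(fcb, opnd);
    // ... look up field_ty_id and build impl_rv as in the commented-out code ...
    // Instruction {
    //     hdr: hdr,
    //     value: Some(vec![impl_rv]),
    //     ops: RwLock::new(vec![impl_opnd]),        // base only; no index operand any more
    //     v: Instruction_::GetFieldIRef {
    //         is_ptr: is_ptr,
    //         base: 0,
    //         index: index as usize,                // constant carried in the instruction
    //     },
    // }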
@@ -59,75 +59,95 @@ const VM_SERIALIZE_FIELDS : usize = 13;
 impl Encodable for VM {
     fn encode<S: Encoder> (&self, s: &mut S) -> Result<(), S::Error> {
+        let mut field_i = 0;
         // serialize VM_SERIALIZE_FIELDS fields
         // PLUS ONE extra global STRUCT_TAG_MAP
-        s.emit_struct("VM", VM_SERIALIZE_FIELDS + 1, |s| {
+        s.emit_struct("VM", VM_SERIALIZE_FIELDS + 2, |s| {
             // next_id
             trace!("...serializing next_id");
-            try!(s.emit_struct_field("next_id", 0, |s| {
+            try!(s.emit_struct_field("next_id", field_i, |s| {
                 s.emit_usize(self.next_id.load(Ordering::SeqCst))
             }));
+            field_i += 1;
             // id_name_map
             trace!("...serializing id_name_map");
             {
                 let map : &HashMap<MuID, MuName> = &self.id_name_map.read().unwrap();
-                try!(s.emit_struct_field("id_name_map", 1, |s| map.encode(s)));
+                try!(s.emit_struct_field("id_name_map", field_i, |s| map.encode(s)));
             }
+            field_i += 1;
             // name_id_map
             trace!("...serializing name_id_map");
             {
                 let map : &HashMap<MuName, MuID> = &self.name_id_map.read().unwrap();