WARNING! Access to this system is limited to authorised users only.
Unauthorised users may be subject to prosecution.
Unauthorised access to this system is a criminal offence under Australian law (Federal Crimes Act 1914 Part VIA)
It is a criminal offence to:
(1) Obtain access to data without authority. -Penalty 2 years imprisonment.
(2) Damage, delete, alter or insert data without authority. -Penalty 10 years imprisonment.
User activity is monitored and recorded. Anyone using this system expressly consents to such monitoring and recording.

Commit 8da4a884 authored by qinsoon
Browse files

refactor hybrid. implemented GetFieldIRef

Note: marked two places in the api implementation as unimplemented, as
the instruction/type changed.
parent 9938b350
......@@ -176,7 +176,7 @@ pub enum Instruction_ {
GetFieldIRef{
is_ptr: bool,
base: OpIndex, // iref or uptr
index: OpIndex // constant
index: usize // constant
},
// iref|uptr<array<T N>> int<M> -> iref|uptr<T>
......@@ -306,7 +306,7 @@ impl Instruction_ {
&Instruction_::GetIRef(reference) => format!("GETIREF {}", ops[reference]),
&Instruction_::GetFieldIRef{is_ptr, base, index} => {
let ptr = select_value!(is_ptr, "PTR", "");
format!("GETFIELDIREF {} {} {}", ptr, ops[base], ops[index])
format!("GETFIELDIREF {} {} {}", ptr, ops[base], index)
},
&Instruction_::GetElementIRef{is_ptr, base, index} => {
let ptr = select_value!(is_ptr, "PTR", "");
......
......@@ -79,7 +79,7 @@ pub fn pick_op_code_for_ssa(ty: &P<MuType>) -> OpCode {
// we are not supposed to have these as SSA
Struct(_)
| Array(_, _)
| Hybrid(_, _)
| Hybrid(_)
| Void => panic!("Not expecting {} as SSA", ty),
// unimplemented
Vector(_, _) => unimplemented!()
......@@ -107,7 +107,7 @@ pub fn pick_op_code_for_value(ty: &P<MuType>) -> OpCode {
// we are not supposed to have these as SSA
Struct(_)
| Array(_, _)
| Hybrid(_, _)
| Hybrid(_)
| Void => unimplemented!(),
// unimplemented
Vector(_, _) => unimplemented!()
......
......@@ -32,6 +32,10 @@ lazy_static! {
pub static ref DOUBLE_TYPE : P<MuType> = P(
MuType::new(new_internal_id(), MuType_::double())
);
pub static ref VOID_TYPE : P<MuType> = P(
MuType::new(new_internal_id(), MuType_::void())
);
pub static ref INTERNAL_TYPES : Vec<P<MuType>> = vec![
ADDRESS_TYPE.clone(),
......@@ -58,6 +62,8 @@ impl MuType {
}
}
pub type StructTag = MuName;
pub type HybridTag = MuName;
#[derive(PartialEq, Debug, RustcEncodable, RustcDecodable)]
pub enum MuType_ {
/// int <length>
......@@ -78,13 +84,13 @@ pub enum MuType_ {
UPtr (P<MuType>),
/// struct<T1 T2 ...>
Struct (MuName),
Struct (StructTag),
/// array<T length>
Array (P<MuType>, usize),
/// hybrid<F1 F2 ... V>: a hybrid of fixed length parts and a variable length part
Hybrid (Vec<P<MuType>>, P<MuType>),
Hybrid (HybridTag),
/// void
Void,
......@@ -124,7 +130,6 @@ impl fmt::Display for MuType_ {
&MuType_::WeakRef(ref ty) => write!(f, "weakref<{}>", ty),
&MuType_::UPtr(ref ty) => write!(f, "uptr<{}>", ty),
&MuType_::Array(ref ty, size) => write!(f, "array<{} {}>", ty, size),
&MuType_::Hybrid(ref fix_tys, ref var_ty) => write!(f, "hybrid<[{}] {}>", vec_utils::as_str(fix_tys), var_ty),
&MuType_::Void => write!(f, "void"),
&MuType_::ThreadRef => write!(f, "threadref"),
&MuType_::StackRef => write!(f, "stackref"),
......@@ -132,14 +137,17 @@ impl fmt::Display for MuType_ {
&MuType_::Vector(ref ty, size) => write!(f, "vector<{} {}>", ty, size),
&MuType_::FuncRef(ref sig) => write!(f, "funcref<{}>", sig),
&MuType_::UFuncPtr(ref sig) => write!(f, "ufuncref<{}>", sig),
&MuType_::Struct(ref tag) => write!(f, "{}(struct)", tag)
&MuType_::Struct(ref tag) => write!(f, "{}(struct)", tag),
&MuType_::Hybrid(ref tag) => write!(f, "{}(hybrid)", tag)
}
}
}
lazy_static! {
/// storing a map from MuName to StructType_
pub static ref STRUCT_TAG_MAP : RwLock<HashMap<MuName, StructType_>> = RwLock::new(HashMap::new());
pub static ref STRUCT_TAG_MAP : RwLock<HashMap<StructTag, StructType_>> = RwLock::new(HashMap::new());
/// storing a map from MuName to HybridType_
pub static ref HYBRID_TAG_MAP : RwLock<HashMap<HybridTag, HybridType_>> = RwLock::new(HashMap::new());
}
#[derive(PartialEq, Debug, RustcEncodable, RustcDecodable)]
......@@ -176,6 +184,47 @@ impl StructType_ {
}
}
/// The definition behind a `MuType_::Hybrid` tag: the types of the
/// fixed-length fields plus the element type of the variable-length tail.
/// Instances are stored in the global `HYBRID_TAG_MAP`, keyed by `HybridTag`.
#[derive(PartialEq, Debug, RustcEncodable, RustcDecodable)]
pub struct HybridType_ {
    // types of the fixed-length fields, in declaration order
    fix_tys: Vec<P<MuType>>,
    // element type of the variable-length part
    var_ty : P<MuType>
}
impl HybridType_ {
    /// Builds a hybrid type definition from its fixed-part field types and
    /// the type of the variable-length tail.
    pub fn new(fix_tys: Vec<P<MuType>>, var_ty: P<MuType>) -> HybridType_ {
        HybridType_ {
            fix_tys: fix_tys,
            var_ty : var_ty
        }
    }

    /// Replaces both the fixed-part types and the variable-part type.
    pub fn set_tys(&mut self, fix_tys: Vec<P<MuType>>, var_ty: P<MuType>) {
        self.fix_tys = fix_tys;
        self.var_ty  = var_ty;
    }

    /// Borrows the list of fixed-part field types.
    pub fn get_fix_tys(&self) -> &Vec<P<MuType>> {
        &self.fix_tys
    }

    /// Borrows the variable-part element type.
    pub fn get_var_ty(&self) -> &P<MuType> {
        &self.var_ty
    }
}
impl fmt::Display for HybridType_ {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "hybrid<").unwrap();
for i in 0..self.fix_tys.len() {
let ty = &self.fix_tys[i];
write!(f, "{}", ty).unwrap();
if i != self.fix_tys.len() - 1 {
write!(f, " ").unwrap();
}
}
write!(f, "|{}>", self.var_ty)
}
}
impl MuType_ {
pub fn int(len: usize) -> MuType_ {
MuType_::Int(len)
......@@ -204,7 +253,18 @@ impl MuType_ {
MuType_::Struct(tag)
}
pub fn mustruct(tag: MuName, list: Vec<P<MuType>>) -> MuType_ {
/// Fills in the field types for an already-registered struct tag in
/// STRUCT_TAG_MAP. Panics if the tag has not been registered yet.
pub fn mustruct_put(tag: MuName, mut list: Vec<P<MuType>>) {
    let mut map_guard = STRUCT_TAG_MAP.write().unwrap();

    // The tag must already have an (possibly empty) entry; bail out loudly
    // if it does not.
    let struct_ty_ = match map_guard.get_mut(&tag) {
        Some(entry) => entry,
        None => panic!("call mustruct_empty() to create an empty struct before mustruct_put()")
    };

    struct_ty_.tys.clear();
    struct_ty_.tys.append(&mut list);
}
pub fn mustruct(tag: StructTag, list: Vec<P<MuType>>) -> MuType_ {
let struct_ty_ = StructType_{tys: list};
// if there is an attempt to use a same tag for different struct,
......@@ -212,9 +272,8 @@ impl MuType_ {
match STRUCT_TAG_MAP.read().unwrap().get(&tag) {
Some(old_struct_ty_) => {
if struct_ty_ != *old_struct_ty_ {
panic!(format!(
"trying to insert {:?} as {}, while the old struct is defined as {:?}",
struct_ty_, tag, old_struct_ty_))
panic!("trying to insert {} as {}, while the old struct is defined as {}",
struct_ty_, tag, old_struct_ty_)
}
},
None => {}
......@@ -227,8 +286,29 @@ impl MuType_ {
/// Constructs an array type `array<ty len>`: `len` elements of type `ty`.
pub fn array(ty: P<MuType>, len: usize) -> MuType_ {
    MuType_::Array(ty, len)
}
pub fn hybrid(fix_tys: Vec<P<MuType>>, var_ty: P<MuType>) -> MuType_ {
MuType_::Hybrid(fix_tys, var_ty)
/// Creates a hybrid type for `tag` and registers a placeholder definition
/// (no fixed fields, `void` variable part) in HYBRID_TAG_MAP.
/// NOTE(review): unlike `hybrid()`, this overwrites any existing entry for
/// the tag without a consistency check — presumably callers fill the real
/// definition in later via the tag map; confirm this is intended.
pub fn hybrid_empty(tag: HybridTag) -> MuType_ {
    let hybrid_ty_ = HybridType_{fix_tys: vec![], var_ty: VOID_TYPE.clone()};
    HYBRID_TAG_MAP.write().unwrap().insert(tag.clone(), hybrid_ty_);
    MuType_::Hybrid(tag)
}
/// Constructs a hybrid type for `tag` with the given fixed-part field types
/// and variable-part type, registering the definition in HYBRID_TAG_MAP.
///
/// Panics if `tag` is already registered with a *different* definition.
pub fn hybrid(tag: HybridTag, fix_tys: Vec<P<MuType>>, var_ty: P<MuType>) -> MuType_ {
    let hybrid_ty_ = HybridType_{fix_tys: fix_tys, var_ty: var_ty};

    // Hold a single write lock across both the consistency check and the
    // insert. The original code did the check under a read lock, released
    // it, then re-acquired a write lock to insert — another thread could
    // register a conflicting definition in that window (check-then-act race).
    let mut map_guard = HYBRID_TAG_MAP.write().unwrap();
    match map_guard.get(&tag) {
        Some(old_hybrid_ty_) => {
            if hybrid_ty_ != *old_hybrid_ty_ {
                panic!("trying to insert {} as {}, while the old hybrid is defined as {}",
                       hybrid_ty_, tag, old_hybrid_ty_);
            }
        },
        None => {}
    }
    map_guard.insert(tag.clone(), hybrid_ty_);

    MuType_::Hybrid(tag)
}
pub fn void() -> MuType_ {
MuType_::Void
......@@ -300,7 +380,13 @@ pub fn is_traced(ty: &MuType) -> bool {
MuType_::ThreadRef
| MuType_::StackRef
| MuType_::Tagref64 => true,
MuType_::Hybrid(ref fix_tys, ref var_ty) => {
MuType_::Hybrid(ref tag) => {
let map = HYBRID_TAG_MAP.read().unwrap();
let hybrid_ty = map.get(tag).unwrap();
let ref fix_tys = hybrid_ty.fix_tys;
let ref var_ty = hybrid_ty.var_ty;
is_traced(var_ty) ||
fix_tys.into_iter().map(|ty| is_traced(ty))
.fold(false, |ret, this| ret || this)
......@@ -329,7 +415,13 @@ pub fn is_native_safe(ty: &MuType) -> bool {
| MuType_::Vector(ref elem_ty, _) => is_native_safe(elem_ty),
MuType_::UPtr(_) => true,
MuType_::UFuncPtr(_) => true,
MuType_::Hybrid(ref fix_tys, ref var_ty) => {
MuType_::Hybrid(ref tag) => {
let map = HYBRID_TAG_MAP.read().unwrap();
let hybrid_ty = map.get(tag).unwrap();
let ref fix_tys = hybrid_ty.fix_tys;
let ref var_ty = hybrid_ty.var_ty;
is_native_safe(var_ty) &&
fix_tys.into_iter().map(|ty| is_native_safe(&ty))
.fold(true, |ret, this| ret && this)
......
......@@ -757,7 +757,7 @@ impl <'a> InstructionSelection {
_ => panic!("didnt expect order {:?} with store inst", order)
}
let resolved_loc = self.emit_node_addr_to_value(loc_op, vm);
let resolved_loc = self.emit_node_addr_to_value(loc_op, f_content, f_context, vm);
let res_temp = self.get_result_value(node);
if self.match_ireg(node) {
......@@ -784,19 +784,19 @@ impl <'a> InstructionSelection {
}
};
let resolved_loc = self.emit_node_addr_to_value(loc_op, vm);
if self.match_ireg(val_op) {
let val = self.emit_ireg(val_op, f_content, f_context, vm);
let resolved_loc = self.emit_node_addr_to_value(loc_op, f_content, f_context, vm);
if self.match_iimm(val_op) {
let val = self.node_iimm_to_i32(val_op);
if generate_plain_mov {
self.backend.emit_mov_mem64_r64(&resolved_loc, &val);
self.backend.emit_mov_mem64_imm32(&resolved_loc, val);
} else {
unimplemented!()
}
} else if self.match_iimm(val_op) {
let val = self.node_iimm_to_i32(val_op);
} else if self.match_ireg(val_op) {
let val = self.emit_ireg(val_op, f_content, f_context, vm);
if generate_plain_mov {
self.backend.emit_mov_mem64_imm32(&resolved_loc, val);
self.backend.emit_mov_mem64_r64(&resolved_loc, &val);
} else {
unimplemented!()
}
......@@ -1876,7 +1876,7 @@ impl <'a> InstructionSelection {
}
}
fn emit_node_addr_to_value(&mut self, op: &P<TreeNode>, vm: &VM) -> P<Value> {
fn emit_node_addr_to_value(&mut self, op: &P<TreeNode>, f_content: &FunctionContent, f_context: &mut FunctionContext, vm: &VM) -> P<Value> {
match op.v {
TreeNode_::Value(ref pv) => {
match pv.v {
......@@ -1910,11 +1910,13 @@ impl <'a> InstructionSelection {
Value_::Constant(_) => unimplemented!()
}
}
TreeNode_::Instruction(_) => self.emit_get_mem_from_inst(op, vm)
TreeNode_::Instruction(_) => self.emit_get_mem_from_inst(op, f_content, f_context, vm)
}
}
fn emit_get_mem_from_inst(&mut self, op: &P<TreeNode>, vm: &VM) -> P<Value> {
fn emit_get_mem_from_inst(&mut self, op: &P<TreeNode>, f_content: &FunctionContent, f_context: &mut FunctionContext, vm: &VM) -> P<Value> {
let header_size = mm::objectmodel::OBJECT_HEADER_SIZE as i32;
match op.v {
TreeNode_::Instruction(ref inst) => {
let ref ops = inst.ops.read().unwrap();
......@@ -1922,8 +1924,44 @@ impl <'a> InstructionSelection {
match inst.v {
Instruction_::GetIRef(op_index) => {
let ref op = ops[op_index];
self.make_memory_op_base_offset(&op.clone_value(), mm::objectmodel::OBJECT_HEADER_SIZE as i32, ADDRESS_TYPE.clone(), vm)
self.make_memory_op_base_offset(&op.clone_value(), header_size, ADDRESS_TYPE.clone(), vm)
}
Instruction_::GetFieldIRef{is_ptr, base, index} => {
let ref base = ops[base];
let struct_ty = {
let ref iref_or_uptr_ty = base.clone_value().ty;
match iref_or_uptr_ty.v {
MuType_::IRef(ref ty)
| MuType_::UPtr(ref ty) => ty.clone(),
_ => panic!("expected the base for GetFieldIRef has a type of iref or uptr, found type: {}", iref_or_uptr_ty)
}
};
let ty_info = vm.get_backend_type_info(struct_ty.id());
let layout = match ty_info.struct_layout.as_ref() {
Some(layout) => layout,
None => panic!("a struct type does not have a layout yet: {:?}", ty_info)
};
debug_assert!(layout.len() > index);
let field_offset : i32 = layout[index] as i32;
match base.v {
TreeNode_::Instruction(Instruction{v: Instruction_::GetIRef(op_index), ref ops, ..}) => {
let ops_guard = ops.read().unwrap();
let ref inner = ops_guard[op_index];
self.make_memory_op_base_offset(&inner.clone_value(), header_size + field_offset, ADDRESS_TYPE.clone(), vm)
},
_ => {
let tmp = self.emit_ireg(base, f_content, f_context, vm);
self.make_memory_op_base_offset(&tmp, field_offset, ADDRESS_TYPE.clone(), vm)
}
}
}
_ => unimplemented!()
}
......
......@@ -99,7 +99,13 @@ pub fn resolve_backend_type_info (ty: &MuType, vm: &VM) -> BackendTypeInfo {
// - align is the most strict aligned element (from all fix tys and var ty)
// - size is fixed tys size
// - layout is fixed tys layout
MuType_::Hybrid(ref fix_tys, ref var_ty) => {
MuType_::Hybrid(ref name) => {
let read_lock = HYBRID_TAG_MAP.read().unwrap();
let hybrid = read_lock.get(name).unwrap();
let fix_tys = hybrid.get_fix_tys();
let var_ty = hybrid.get_var_ty();
// treat fix_tys as struct
let mut ret = layout_struct(fix_tys, vm);
......
......@@ -880,10 +880,11 @@ impl<'lb, 'lvm> BundleLoader<'lb, 'lvm> {
self.struct_id_tags.push((id, tag.clone()));
MuType_::Struct(tag)
},
NodeType::TypeHybrid { id: _, ref fixedtys, varty } => {
let impl_fixedtys = fixedtys.iter().map(|t| self.ensure_type_rec(*t)).collect::<Vec<_>>();
let impl_varty = self.ensure_type_rec(varty);
MuType_::Hybrid(impl_fixedtys, impl_varty)
NodeType::TypeHybrid { id: _, ref fixedtys, varty } => {
unimplemented!()
// let impl_fixedtys = fixedtys.iter().map(|t| self.ensure_type_rec(*t)).collect::<Vec<_>>();
// let impl_varty = self.ensure_type_rec(varty);
// MuType_::Hybrid(impl_fixedtys, impl_varty)
},
NodeType::TypeArray { id: _, elemty, len } => {
let impl_elemty = self.ensure_type_rec(elemty);
......@@ -1529,27 +1530,28 @@ impl<'lb, 'lvm> BundleLoader<'lb, 'lvm> {
}
},
NodeInst::NodeGetFieldIRef { id: _, result_id, is_ptr, refty, index, opnd } => {
let impl_opnd = self.get_treenode(fcb, opnd);
let impl_index = self.ensure_constint_of(index as u64);
let refty_node = self.b.bundle.types.get(&refty).unwrap();
let field_ty_id = match **refty_node {
NodeType::TypeStruct { id: _, ref fieldtys } => {
fieldtys[index as usize]
},
ref t => panic!("GETFIELDIREF {}: Expected struct type. actual: {:?}", id, t)
};
let impl_rvtype = self.ensure_iref_or_uptr(field_ty_id, is_ptr);
let impl_rv = self.new_ssa(fcb, result_id, impl_rvtype).clone_value();
Instruction {
hdr: hdr,
value: Some(vec![impl_rv]),
ops: RwLock::new(vec![impl_opnd, impl_index]),
v: Instruction_::GetFieldIRef {
is_ptr: is_ptr,
base: 0,
index: 1,
},
}
unimplemented!()
// let impl_opnd = self.get_treenode(fcb, opnd);
// let impl_index = self.ensure_constint_of(index as u64);
// let refty_node = self.b.bundle.types.get(&refty).unwrap();
// let field_ty_id = match **refty_node {
// NodeType::TypeStruct { id: _, ref fieldtys } => {
// fieldtys[index as usize]
// },
// ref t => panic!("GETFIELDIREF {}: Expected struct type. actual: {:?}", id, t)
// };
// let impl_rvtype = self.ensure_iref_or_uptr(field_ty_id, is_ptr);
// let impl_rv = self.new_ssa(fcb, result_id, impl_rvtype).clone_value();
// Instruction {
// hdr: hdr,
// value: Some(vec![impl_rv]),
// ops: RwLock::new(vec![impl_opnd, impl_index]),
// v: Instruction_::GetFieldIRef {
// is_ptr: is_ptr,
// base: 0,
// index: 1,
// },
// }
},
NodeInst::NodeGetElemIRef { id: _, result_id, is_ptr, refty, indty: _, opnd, index } => {
let impl_opnd = self.get_treenode(fcb, opnd);
......
......@@ -59,75 +59,95 @@ const VM_SERIALIZE_FIELDS : usize = 13;
impl Encodable for VM {
fn encode<S: Encoder> (&self, s: &mut S) -> Result<(), S::Error> {
let mut field_i = 0;
// serialize VM_SERIALIZE_FIELDS fields
// PLUS ONE extra global STRUCT_TAG_MAP
s.emit_struct("VM", VM_SERIALIZE_FIELDS + 1, |s| {
s.emit_struct("VM", VM_SERIALIZE_FIELDS + 2, |s| {
// next_id
trace!("...serializing next_id");
try!(s.emit_struct_field("next_id", 0, |s| {
try!(s.emit_struct_field("next_id", field_i, |s| {
s.emit_usize(self.next_id.load(Ordering::SeqCst))
}));
field_i += 1;
// id_name_map
trace!("...serializing id_name_map");
{
let map : &HashMap<MuID, MuName> = &self.id_name_map.read().unwrap();
try!(s.emit_struct_field("id_name_map", 1, |s| map.encode(s)));
try!(s.emit_struct_field("id_name_map", field_i, |s| map.encode(s)));
}
field_i += 1;
// name_id_map
trace!("...serializing name_id_map");
{
let map : &HashMap<MuName, MuID> = &self.name_id_map.read().unwrap();
try!(s.emit_struct_field("name_id_map", 2, |s| map.encode(s)));
try!(s.emit_struct_field("name_id_map", field_i, |s| map.encode(s)));
}
field_i += 1;
// types
trace!("...serializing types");
{
let types = &self.types.read().unwrap();
try!(s.emit_struct_field("types", 3, |s| types.encode(s)));
try!(s.emit_struct_field("types", field_i, |s| types.encode(s)));
}
field_i += 1;
// STRUCT_TAG_MAP
trace!("...serializing struct_tag_map");
{
let struct_tag_map = types::STRUCT_TAG_MAP.read().unwrap();
try!(s.emit_struct_field("struct_tag_map", 4, |s| struct_tag_map.encode(s)));
try!(s.emit_struct_field("struct_tag_map", field_i, |s| struct_tag_map.encode(s)));
}
field_i += 1;
// HYBRID_TAG_MAP
trace!("...serializing hybrid_tag_map");
{
let hybrid_tag_map = types::HYBRID_TAG_MAP.read().unwrap();
try!(s.emit_struct_field("hybrid_tag_map", field_i, |s| hybrid_tag_map.encode(s)));
}
field_i += 1;
// backend_type_info
trace!("...serializing backend_type_info");
{
let backend_type_info : &HashMap<_, _> = &self.backend_type_info.read().unwrap();
try!(s.emit_struct_field("backend_type_info", 5, |s| backend_type_info.encode(s)));
try!(s.emit_struct_field("backend_type_info", field_i, |s| backend_type_info.encode(s)));
}
field_i += 1;
// constants
trace!("...serializing constants");
{
let constants : &HashMap<_, _> = &self.constants.read().unwrap();
try!(s.emit_struct_field("constants", 6, |s| constants.encode(s)));
try!(s.emit_struct_field("constants", field_i, |s| constants.encode(s)));
}
field_i += 1;
// globals
trace!("...serializing globals");
{
let globals: &HashMap<_, _> = &self.globals.read().unwrap();
try!(s.emit_struct_field("globals", 7, |s| globals.encode(s)));
try!(s.emit_struct_field("globals", field_i, |s| globals.encode(s)));
}
field_i += 1;
// func sigs
trace!("...serializing func_sigs");
{
let func_sigs: &HashMap<_, _> = &self.func_sigs.read().unwrap();
try!(s.emit_struct_field("func_sigs", 8, |s| func_sigs.encode(s)));
try!(s.emit_struct_field("func_sigs", field_i, |s| func_sigs.encode(s)));
}
field_i += 1;
// funcs
trace!("...serializing funcs");
{
let funcs : &HashMap<_, _> = &self.funcs.read().unwrap();
try!(s.emit_struct_field("funcs", 9, |s| {
try!(s.emit_struct_field("funcs", field_i, |s| {
s.emit_map(funcs.len(), |s| {
let mut i = 0;
for (k,v) in funcs.iter() {
......@@ -140,12 +160,13 @@ impl Encodable for VM {
})
}));
}
field_i += 1;
// func_vers
trace!("...serializing func_vers");
{
let func_vers : &HashMap<_, _> = &self.func_vers.read().unwrap();
try!(s.emit_struct_field("func_vers", 10, |s| {
try!(s.emit_struct_field("func_vers", field_i, |s| {
s.emit_map(func_vers.len(), |s| {
let mut i = 0;
for (k, v) in func_vers.iter() {
......@@ -158,25 +179,28 @@ impl Encodable for VM {
})
}));
}
field_i += 1;
// primordial
trace!("...serializing primordial");
{
let primordial = &self.primordial.read().unwrap();
try!(s.emit_struct_field("primordial", 11, |s| primordial.encode(s)));
try!(s.emit_struct_field("primordial", field_i, |s| primordial.encode(s)));
}
field_i += 1;
// is_running
trace!("...serializing is_running");
{
try!(s.emit_struct_field("is_running", 12, |s| self.is_running.load(Ordering::SeqCst).encode(s)));
try!(s.emit_struct_field("is_running", field_i, |s| self.is_running.load(Ordering::SeqCst).encode(s)));
}
field_i += 1;
// compiled_funcs
trace!("...serializing compiled_funcs");
{
let compiled_funcs : &HashMap<_, _> = &self.compiled_funcs.read().unwrap();
try!(s.emit_struct_field("compiled_funcs", 13, |s| {
try!(s.emit_struct_field("compiled_funcs", field_i, |s| {
s.emit_map(compiled_funcs.len(), |s| {
let mut i = 0;
for (k, v) in compiled_funcs.iter() {
......@@ -189,6 +213,7 @@ impl Encodable for VM {
})
}));
}
field_i += 1;
trace!("serializing finished");
Ok(())
......@@ -198,45 +223,69 @@ impl Encodable for VM {
impl Decodable for VM {
fn decode<D: Decoder>(d: &mut D) -> Result<VM, D::Error> {
d.read_struct("VM", VM_SERIALIZE_FIELDS + 1, |d| {
let mut field_i = 0;
d.read_struct("VM", VM_SERIALIZE_FIELDS + 2, |d