Commit d9f4e1b1 authored by qinsoon

[wip] primordial thread launch, up to thread entry

FIXME: GC settings such as heap size and n_gcthreads should be properly serialized
parent b814438f
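
The FIXME above concerns GC settings (heap size, number of GC threads) that are currently re-read from defaults in resume_vm() rather than travelling with the serialized VM. Below is a minimal sketch of what serializing them could look like with rustc-serialize, which this crate already depends on; the GCOptions struct is hypothetical, but its fields mirror the immix_size, lo_size and n_gcthreads values that resume_vm() passes to gc::gc_init() later in this diff.

```rust
extern crate rustc_serialize;

use rustc_serialize::json;

// Hypothetical stand-in for the GC settings the FIXME refers to; field names
// mirror what resume_vm() hands to gc::gc_init() in this commit.
#[derive(RustcEncodable, RustcDecodable, Debug, PartialEq)]
struct GCOptions {
    immix_size: usize,
    lo_size: usize,
    n_gcthreads: usize,
}

fn main() {
    let opts = GCOptions { immix_size: 500 << 20, lo_size: 500 << 20, n_gcthreads: 8 };

    // encode alongside the rest of the VM state ...
    let encoded = json::encode(&opts).unwrap();

    // ... and restore it before calling gc_init() on resume
    let decoded: GCOptions = json::decode(&encoded).unwrap();
    assert_eq!(opts, decoded);
}
```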
......@@ -22,4 +22,5 @@ memsec = "0.1.9"
rustc-serialize = "*"
time = "0.1.34"
aligned_alloc = "0.1.2"
crossbeam = "0.2.8"
\ No newline at end of file
crossbeam = "0.2.8"
byteorder = "0.5.3"
\ No newline at end of file
......@@ -4,5 +4,5 @@ extern crate gcc;
#[cfg(target_arch = "x86_64")]
fn main() {
gcc::compile_library("libruntime.a", &["src/runtime/runtime_x64_macos.c"]);
gcc::compile_library("libgc_clib_x64.a", &["src/runtime/mem/heap/gc/clib_x64.c"]);
gcc::compile_library("libgc_clib_x64.a", &["src/runtime/mm/heap/gc/clib_x64.c"]);
}
\ No newline at end of file
......@@ -3,6 +3,8 @@ extern crate lazy_static;
#[macro_use]
extern crate log;
extern crate rustc_serialize;
extern crate byteorder;
extern crate simple_logger;
#[macro_use]
pub mod utils;
......
use runtime::mem::common::LOG_POINTER_SIZE;
use runtime::mem::common::Address;
use runtime::mem::common::bitmap::Bitmap;
use runtime::mm::common::LOG_POINTER_SIZE;
use runtime::mm::common::Address;
use runtime::mm::common::bitmap::Bitmap;
#[derive(Clone)]
pub struct AddressBitmap {
......
use std::mem;
use runtime::mem::common::LOG_POINTER_SIZE;
use runtime::mem::common::Address;
use runtime::mem::heap::gc::malloc_zero;
use runtime::mm::common::LOG_POINTER_SIZE;
use runtime::mm::common::Address;
use runtime::mm::heap::gc::malloc_zero;
#[derive(Clone)]
pub struct AddressMap<T: Copy> {
......
use std::mem;
use runtime::mem::heap::gc::malloc_zero;
use runtime::mm::heap::gc::malloc_zero;
#[derive(Clone)]
pub struct Bitmap {
......
use runtime::mem::common::Address;
use runtime::mem::heap::immix;
use runtime::mm::common::Address;
use runtime::mm::heap::immix;
extern crate aligned_alloc;
......@@ -121,7 +121,7 @@ pub fn alloc_large(size: usize, align: usize, mutator: &mut immix::ImmixMutatorL
return addr;
},
None => {
use runtime::mem::heap::gc;
use runtime::mm::heap::gc;
gc::trigger_gc();
}
}
......
use runtime::mem::heap::immix::MUTATORS;
use runtime::mem::heap::immix::N_MUTATORS;
use runtime::mem::heap::immix::ImmixMutatorLocal;
use runtime::mem::heap::immix::ImmixSpace;
use runtime::mem::heap::immix::ImmixLineMarkTable;
use runtime::mem::heap::freelist::FreeListSpace;
use runtime::mem::objectmodel;
use runtime::mem::common;
use runtime::mem::common::{Address, ObjectReference};
use runtime::mem::common::AddressMap;
use runtime::mm::heap::immix::MUTATORS;
use runtime::mm::heap::immix::N_MUTATORS;
use runtime::mm::heap::immix::ImmixMutatorLocal;
use runtime::mm::heap::immix::ImmixSpace;
use runtime::mm::heap::immix::ImmixLineMarkTable;
use runtime::mm::heap::freelist::FreeListSpace;
use runtime::mm::objectmodel;
use runtime::mm::common;
use runtime::mm::common::{Address, ObjectReference};
use runtime::mm::common::AddressMap;
use std::sync::atomic::{AtomicIsize, Ordering};
use std::sync::{Arc, Mutex, Condvar, RwLock};
......@@ -298,7 +298,7 @@ pub fn start_trace(work_stack: &mut Vec<ObjectReference>, immix_space: Arc<Immix
#[inline(never)]
#[cfg(not(feature = "mt-trace"))]
pub fn start_trace(local_queue: &mut Vec<ObjectReference>, immix_space: Arc<ImmixSpace>, lo_space: Arc<RwLock<FreeListSpace>>) {
use runtime::mem::objectmodel;
use runtime::mm::objectmodel;
let mark_state = objectmodel::MARK_STATE.load(Ordering::SeqCst) as u8;
while !local_queue.is_empty() {
......@@ -401,7 +401,7 @@ pub fn steal_process_edge(addr: Address, local_queue:&mut Vec<ObjectReference>,
#[inline(always)]
pub fn trace_object(obj: ObjectReference, local_queue: &mut Vec<ObjectReference>, alloc_map: *mut u8, trace_map: *mut u8, line_mark_table: &ImmixLineMarkTable, immix_start: Address, immix_end: Address, mark_state: u8) {
use runtime::mem::objectmodel;
use runtime::mm::objectmodel;
objectmodel::mark_as_traced(trace_map, immix_start, obj, mark_state);
......@@ -448,7 +448,7 @@ pub fn trace_object(obj: ObjectReference, local_queue: &mut Vec<ObjectReference>
#[inline(always)]
pub fn process_edge(addr: Address, local_queue:&mut Vec<ObjectReference>, trace_map: *mut u8, space_start: Address, mark_state: u8) {
use runtime::mem::objectmodel;
use runtime::mm::objectmodel;
let obj_addr : ObjectReference = unsafe{addr.load()};
......
use runtime::mem::heap::immix;
use runtime::mem::heap::immix::ImmixSpace;
use runtime::mem::heap::immix::immix_space::ImmixBlock;
use runtime::mem::heap::gc;
use runtime::mm::heap::immix;
use runtime::mm::heap::immix::ImmixSpace;
use runtime::mm::heap::immix::immix_space::ImmixBlock;
use runtime::mm::heap::gc;
use runtime::mem::common::LOG_POINTER_SIZE;
use runtime::mem::common::Address;
use runtime::mm::common::LOG_POINTER_SIZE;
use runtime::mm::common::Address;
use std::*;
use std::sync::Arc;
......
use runtime::mem::heap::immix;
use runtime::mem::heap::gc;
use runtime::mem::common::Address;
use runtime::mem::common::AddressMap;
use runtime::mem::heap::gc::malloc_zero;
use runtime::mm::heap::immix;
use runtime::mm::heap::gc;
use runtime::mm::common::Address;
use runtime::mm::common::AddressMap;
use runtime::mm::heap::gc::malloc_zero;
extern crate std;
extern crate memmap;
......
use runtime::mem::common::Address;
use runtime::mm::common::Address;
use std::sync::atomic::AtomicUsize;
pub mod immix;
......
......@@ -4,12 +4,12 @@ pub mod common;
pub mod objectmodel;
pub mod heap;
pub use runtime::mem::heap::immix::ImmixMutatorLocal as Mutator;
use runtime::mem::common::ObjectReference;
use runtime::mem::heap::immix::ImmixSpace;
use runtime::mem::heap::immix::ImmixMutatorLocal;
use runtime::mem::heap::freelist;
use runtime::mem::heap::freelist::FreeListSpace;
pub use runtime::mm::heap::immix::ImmixMutatorLocal as Mutator;
use runtime::mm::common::ObjectReference;
use runtime::mm::heap::immix::ImmixSpace;
use runtime::mm::heap::immix::ImmixMutatorLocal;
use runtime::mm::heap::freelist;
use runtime::mm::heap::freelist::FreeListSpace;
use std::sync::Arc;
use std::sync::RwLock;
......
use std::sync::atomic;
pub static MARK_STATE : atomic::AtomicUsize = atomic::ATOMIC_USIZE_INIT;
use runtime::mem::common::ObjectReference;
use runtime::mem::common::Address;
use runtime::mem::common::LOG_POINTER_SIZE;
use runtime::mm::common::ObjectReference;
use runtime::mm::common::Address;
use runtime::mm::common::LOG_POINTER_SIZE;
pub fn init() {
MARK_STATE.store(1, atomic::Ordering::SeqCst);
......
pub mod mem;
pub mod mm;
pub mod thread;
pub use runtime::mem::common::Address;
pub use runtime::mem::common::ObjectReference;
pub use runtime::mm::common::Address;
pub use runtime::mm::common::ObjectReference;
use utils;
use ast::ir::*;
use compiler::backend::Word;
use compiler::backend::RegGroup;
......@@ -24,14 +24,23 @@ impl ValueLocation {
match self {
&ValueLocation::Register(_, _)
| &ValueLocation::Direct(_, _)
| &ValueLocation::Indirect(_, _)
| &ValueLocation::Constant(_, _) => unimplemented!(),
| &ValueLocation::Indirect(_, _) => unimplemented!(),
&ValueLocation::Constant(group, word) => {
(group, word)
}
&ValueLocation::Relocatable(_, _) => panic!("expect a runtime value")
}
}
#[allow(unused_variables)]
pub fn from_constant(c: Constant) -> ValueLocation {
unimplemented!()
match c {
Constant::Int(int_val) => ValueLocation::Constant(RegGroup::GPR, utils::mem::u64_to_raw(int_val)),
Constant::Float(f32_val) => ValueLocation::Constant(RegGroup::FPR, utils::mem::f32_to_raw(f32_val)),
Constant::Double(f64_val) => ValueLocation::Constant(RegGroup::FPR, utils::mem::f64_to_raw(f64_val)),
_ => unimplemented!()
}
}
}
\ No newline at end of file
......@@ -5,7 +5,7 @@ use ast::ptr::*;
use ast::types::*;
use vm::VM;
use runtime::ValueLocation;
use runtime::mem as gc;
use runtime::mm;
use utils::ByteSize;
use utils::Address;
......@@ -167,7 +167,7 @@ pub enum MuStackState {
pub struct MuThread {
pub hdr: MuEntityHeader,
allocator: Box<gc::Mutator>,
allocator: Box<mm::Mutator>,
stack: Option<Box<MuStack>>,
user_tls: Option<Address>
......@@ -181,7 +181,7 @@ extern "C" {
}
impl MuThread {
pub fn new(id: MuID, allocator: Box<gc::Mutator>, stack: Box<MuStack>, user_tls: Option<Address>) -> MuThread {
pub fn new(id: MuID, allocator: Box<mm::Mutator>, stack: Box<MuStack>, user_tls: Option<Address>) -> MuThread {
MuThread {
hdr: MuEntityHeader::unnamed(id),
allocator: allocator,
......@@ -194,7 +194,7 @@ impl MuThread {
#[allow(unused_variables)]
pub extern fn mu_thread_launch(id: MuID, stack: Box<MuStack>, user_tls: Option<Address>, vm: &VM) -> JoinHandle<()> {
match thread::Builder::new().name(format!("Mu Thread #{}", id)).spawn(move || {
let muthread = Box::new(MuThread::new(id, gc::new_mutator(), stack, user_tls));
let muthread = Box::new(MuThread::new(id, mm::new_mutator(), stack, user_tls));
// set thread local
let addr = unsafe {init_thread_local(&muthread)};
......
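
mu_thread_launch() above creates a named OS thread with std::thread::Builder and hands back its JoinHandle. The following is a standalone sketch of just that spawning pattern, with the MuThread construction, stack and thread-local setup left out:

```rust
use std::thread;
use std::thread::JoinHandle;

// Sketch of the spawning pattern used by mu_thread_launch(): a named OS
// thread created via thread::Builder, returning a JoinHandle to the caller.
fn launch(id: usize) -> JoinHandle<()> {
    thread::Builder::new()
        .name(format!("Mu Thread #{}", id))
        .spawn(move || {
            // the real closure builds a MuThread, registers it as
            // thread-local state, and then enters the Mu stack
            println!("thread {} entered", id);
        })
        .unwrap()
}

fn main() {
    launch(1).join().unwrap();
}
```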
#![allow(unused_imports)]
pub extern crate memmap;
pub extern crate memsec;
\ No newline at end of file
pub extern crate memsec;
use compiler::backend::Word;
use byteorder::{LittleEndian, BigEndian, ReadBytesExt, WriteBytesExt, ByteOrder};
#[cfg(target_arch = "x86_64")]
pub fn u64_to_raw(val: u64) -> Word {
let mut ret = vec![];
ret.write_u64::<LittleEndian>(val).unwrap();
as_word(ret)
}
#[cfg(target_arch = "x86_64")]
pub fn f32_to_raw(val: f32) -> Word {
let mut ret = vec![];
ret.write_f32::<LittleEndian>(val).unwrap();
as_word(ret)
}
#[cfg(target_arch = "x86_64")]
pub fn f64_to_raw(val: f64) -> Word {
let mut ret = vec![];
ret.write_f64::<LittleEndian>(val).unwrap();
as_word(ret)
}
#[cfg(target_arch = "x86_64")]
pub fn as_word(mut u8_array: Vec<u8>) -> Word {
LittleEndian::read_uint(&mut u8_array, 8) as Word
}
#[cfg(test)]
mod tests{
use super::*;
use compiler::backend::Word;
#[test]
fn test_primitive_to_raw() {
let a : Word = 0xabcd;
let raw = u64_to_raw(a as u64);
assert_eq!(raw, a);
}
}
\ No newline at end of file
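
The new utils::mem helpers turn constants into raw machine words by writing them out little-endian with the byteorder crate and reading the bytes back as an integer. Below is a standalone round-trip sketch of that encoding; the function names mirror the helpers above, but a plain u64 stands in for compiler::backend::Word:

```rust
extern crate byteorder;

use byteorder::{ByteOrder, LittleEndian, WriteBytesExt};

// A constant is serialized little-endian, then the 8 bytes are reinterpreted
// as one raw machine word (u64 here, standing in for Word).
fn f64_to_raw(val: f64) -> u64 {
    let mut buf: Vec<u8> = vec![];
    buf.write_f64::<LittleEndian>(val).unwrap();
    LittleEndian::read_u64(&buf)
}

fn raw_to_f64(raw: u64) -> f64 {
    let mut buf = [0u8; 8];
    LittleEndian::write_u64(&mut buf, raw);
    LittleEndian::read_f64(&buf)
}

fn main() {
    let pi = 3.14f64;
    let raw = f64_to_raw(pi);
    // the bit pattern survives the trip through the raw word
    assert_eq!(raw_to_f64(raw), pi);
}
```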
......@@ -11,16 +11,19 @@ use vm::vm_options::VMOptions;
use runtime::thread::*;
use runtime::ValueLocation;
use utils::Address;
use runtime::mem as gc;
use runtime::mm as gc;
use log;
use simple_logger;
use rustc_serialize::{Encodable, Encoder, Decodable, Decoder};
use std::path;
use std::sync::RwLock;
use std::sync::atomic::{AtomicUsize, AtomicBool, ATOMIC_BOOL_INIT, ATOMIC_USIZE_INIT, Ordering};
use std::thread::JoinHandle;
use std::os::raw::c_char;
use std::sync::Arc;
use std::os::raw::c_char;
use std::ffi::CStr;
pub struct VM {
// serialize
......@@ -292,6 +295,17 @@ impl <'a> VM {
ret
}
pub fn resume_vm(serialized_vm: &str) -> VM {
use rustc_serialize::json;
let vm = json::decode(serialized_vm).unwrap();
let options = VMOptions::default();
gc::gc_init(options.immix_size, options.lo_size, options.n_gcthreads);
vm
}
pub fn next_id(&self) -> MuID {
self.next_id.fetch_add(1, Ordering::SeqCst)
}
......@@ -498,14 +512,19 @@ impl <'a> VM {
}
#[no_mangle]
pub extern fn mu_main(serialized_vm : *const c_char, len: usize) {
use rustc_serialize::json;
println!("mu_main() started...");
pub extern fn mu_trace_level_log() {
simple_logger::init_with_level(log::LogLevel::Trace).ok();
}
#[no_mangle]
pub extern fn mu_main(serialized_vm : *const c_char) {
debug!("mu_main() started...");
let str_vm = unsafe {String::from_raw_parts(serialized_vm as *mut u8, len, len)};
// clone it, otherwise rust allocator will try deallocate
// since the char* points to data section, the deallocation will fail
let str_vm = unsafe{CStr::from_ptr(serialized_vm)}.to_str().unwrap();
let vm : Arc<VM> = Arc::new(json::decode(&str_vm).unwrap());
let vm : Arc<VM> = Arc::new(VM::resume_vm(str_vm));
let primordial = vm.primordial.read().unwrap();
if primordial.is_none() {
......
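
The rewritten mu_main() borrows the serialized VM through CStr::from_ptr because the char* points into the binary's data section, so the Rust allocator must never be asked to free it (which the old String::from_raw_parts version risked). Here is a standalone sketch of that borrowing pattern; SERIALIZED_VM is a hypothetical stand-in for the embedded VM blob:

```rust
use std::ffi::CStr;
use std::os::raw::c_char;

// A NUL-terminated byte string in static (read-only) memory, standing in for
// the serialized VM that the generated binary embeds in its data section.
static SERIALIZED_VM: &'static [u8] = b"{\"id\": 42}\0";

fn main() {
    let ptr = SERIALIZED_VM.as_ptr() as *const c_char;

    // Borrow the bytes: CStr::from_ptr never takes ownership, so the Rust
    // allocator is never asked to free memory it did not allocate.
    let borrowed: &str = unsafe { CStr::from_ptr(ptr) }.to_str().unwrap();

    // Copy into an owned String only if it must outlive the borrow.
    let owned: String = borrowed.to_owned();
    println!("{}", owned);
}
```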
use mu::runtime::mem;
use mu::runtime::mem::heap;
use mu::runtime::mem::heap::immix::ImmixMutatorLocal;
use mu::runtime::mem::heap::immix::ImmixSpace;
use mu::runtime::mem::heap::freelist::FreeListSpace;
use mu::runtime::mem::common::Address;
use mu::runtime::mem::common::ObjectReference;
use mu::runtime::mem::objectmodel;
use mu::runtime::mm;
use mu::runtime::mm::heap;
use mu::runtime::mm::heap::immix::ImmixMutatorLocal;
use mu::runtime::mm::heap::immix::ImmixSpace;
use mu::runtime::mm::heap::freelist::FreeListSpace;
use mu::runtime::mm::common::Address;
use mu::runtime::mm::common::ObjectReference;
use mu::runtime::mm::objectmodel;
use std::sync::RwLock;
use std::sync::Arc;
......@@ -21,8 +21,8 @@ const LO_SPACE_SIZE : usize = 500 << 20;
#[test]
fn test_exhaust_alloc() {
mem::gc_init(IMMIX_SPACE_SIZE, LO_SPACE_SIZE, 8);
let mut mutator = mem::new_mutator();
mm::gc_init(IMMIX_SPACE_SIZE, LO_SPACE_SIZE, 8);
let mut mutator = mm::new_mutator();
println!("Trying to allocate {} objects of (size {}, align {}). ", WORK_LOAD, OBJECT_SIZE, OBJECT_ALIGN);
const ACTUAL_OBJECT_SIZE : usize = OBJECT_SIZE;
......
......@@ -4,12 +4,12 @@
#![allow(unused_variables)]
#![allow(dead_code)]
use mu::runtime::mem;
use mu::runtime::mem::heap;
use mu::runtime::mem::heap::immix::ImmixMutatorLocal;
use mu::runtime::mem::heap::immix::ImmixSpace;
use mu::runtime::mem::heap::freelist;
use mu::runtime::mem::heap::freelist::FreeListSpace;
use mu::runtime::mm;
use mu::runtime::mm::heap;
use mu::runtime::mm::heap::immix::ImmixMutatorLocal;
use mu::runtime::mm::heap::immix::ImmixSpace;
use mu::runtime::mm::heap::freelist;
use mu::runtime::mm::heap::freelist::FreeListSpace;
use std::mem::size_of;
use std::sync::atomic::Ordering;
......@@ -114,8 +114,8 @@ fn alloc(mutator: &mut ImmixMutatorLocal) -> *mut Node {
fn start() {
unsafe {heap::gc::set_low_water_mark();}
mem::gc_init(IMMIX_SPACE_SIZE, LO_SPACE_SIZE, 8);
let mut mutator = mem::new_mutator();
mm::gc_init(IMMIX_SPACE_SIZE, LO_SPACE_SIZE, 8);
let mut mutator = mm::new_mutator();
println!("Garbage Collector Test");
println!(" Live storage will peak at {} bytes.\n",
......