Commit 69d76617 authored by qinsoon

[wip] copy gc code here (instead of submodules)

gonna port tests to this repo
parent c29c8471
@@ -2,3 +2,4 @@ target/*
emit/*
Cargo.lock
*.log
*.DS_Store
[submodule "gc"]
path = gc
url = git@gitlab.anu.edu.au:mu/immix-rust.git
@@ -5,9 +5,6 @@ version = "0.0.1"
authors = [ "Your name <you@example.com>" ]
build = "build.rs"
[dependencies.immix_rust]
path = "gc"
[build-dependencies.gcc]
git = "https://github.com/alexcrichton/gcc-rs"
@@ -20,4 +17,8 @@ linked-hash-map = "0.0.10"
hprof = "0.1.3"
memmap = "0.4.0"
memsec = "0.1.9"
rustc-serialize = "*"
\ No newline at end of file
rustc-serialize = "*"
time = "0.1.34"
aligned_alloc = "0.1.2"
crossbeam = "0.2.8"
\ No newline at end of file
@@ -3,5 +3,6 @@ extern crate gcc;
#[cfg(target_os = "macos")]
#[cfg(target_arch = "x86_64")]
fn main() {
gcc::compile_library("libruntime.a", &["src/runtime/runtime_x64_macos.c"]);
gcc::compile_library("libruntime.a", &["src/runtime/runtime_x64_macos.c"]);
gcc::compile_library("libgc_clib_x64.a", &["src/runtime/mem/heap/gc/clib_x64.c"]);
}
\ No newline at end of file
Subproject commit 7fc9418a33982ac744c19c5ba65d096b43b5b0dc
@@ -9,7 +9,6 @@ use std::collections::HashMap;
use std::fmt;
use std::default;
use std::sync::RwLock;
use std::cell::Cell;
use std::sync::atomic::{AtomicUsize, ATOMIC_USIZE_INIT, Ordering};
pub type WPID = usize;
@@ -2,7 +2,6 @@
extern crate lazy_static;
#[macro_use]
extern crate log;
extern crate immix_rust as gc;
extern crate rustc_serialize;
#[macro_use]
use runtime::mem::common::LOG_POINTER_SIZE;
use runtime::mem::common::Address;
use runtime::mem::common::bitmap::Bitmap;
#[derive(Clone)]
pub struct AddressBitmap {
start : Address,
end : Address,
bitmap: Bitmap
}
impl AddressBitmap {
pub fn new(start: Address, end: Address) -> AddressBitmap {
let bitmap_len = end.diff(start) >> LOG_POINTER_SIZE;
let bitmap = Bitmap::new(bitmap_len);
AddressBitmap{start: start, end: end, bitmap: bitmap}
}
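// The setters below take `&self` but mutate the underlying `Bitmap` through
// `mem::transmute`, hence the `unsafe` and `#[allow(mutable_transmutes)]`;
// callers presumably provide any synchronisation that is needed.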
#[inline(always)]
#[allow(mutable_transmutes)]
pub unsafe fn set_bit(&self, addr: Address) {
use std::mem;
let mutable_bitmap : &mut Bitmap = mem::transmute(&self.bitmap);
mutable_bitmap.set_bit(addr.diff(self.start) >> LOG_POINTER_SIZE);
}
#[inline(always)]
#[allow(mutable_transmutes)]
pub unsafe fn clear_bit(&self, addr: Address) {
use std::mem;
let mutable_bitmap : &mut Bitmap = mem::transmute(&self.bitmap);
mutable_bitmap.clear_bit(addr.diff(self.start) >> LOG_POINTER_SIZE);
}
#[inline(always)]
pub fn test_bit(&self, addr: Address) -> bool {
self.bitmap.test_bit(addr.diff(self.start) >> LOG_POINTER_SIZE)
}
#[inline(always)]
pub fn length_until_next_bit(&self, addr: Address) -> usize {
self.bitmap.length_until_next_bit(addr.diff(self.start) >> LOG_POINTER_SIZE)
}
#[inline(always)]
#[allow(mutable_transmutes)]
pub unsafe fn set(&self, addr: Address, value: u64, length: usize) {
use std::mem;
if cfg!(debug_assertions) {
assert!(addr >= self.start && addr <= self.end);
}
let index = addr.diff(self.start) >> LOG_POINTER_SIZE;
let mutable_bitmap : &mut Bitmap = mem::transmute(&self.bitmap);
mutable_bitmap.set(index, value, length);
}
#[inline(always)]
pub fn get(&self, addr: Address, length: usize) -> u64 {
if cfg!(debug_assertions) {
assert!(addr >= self.start && addr <= self.end);
}
let index = addr.diff(self.start) >> LOG_POINTER_SIZE;
self.bitmap.get(index, length)
}
pub fn print(&self) {
self.bitmap.print();
}
}
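#[cfg(test)]
mod tests {
use super::*;
use runtime::mem::common::Address;
// An illustrative usage sketch (not exercising real heap memory: the bitmap
// is backed by its own allocation, so nothing at `start` is ever touched).
#[test]
fn test_address_bitmap() {
let start = Address::from_ptr(0x10000 as *const u64);
let end = start.plus(1024);
let bitmap = AddressBitmap::new(start, end);
unsafe {bitmap.set_bit(start.plus(8));}
assert!(bitmap.test_bit(start.plus(8)));
assert!(!bitmap.test_bit(start));
}
}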
\ No newline at end of file
use std::mem;
use runtime::mem::common::LOG_POINTER_SIZE;
use runtime::mem::common::Address;
use runtime::mem::heap::gc::malloc_zero;
#[derive(Clone)]
pub struct AddressMap<T: Copy> {
start : Address,
end : Address,
pub ptr : *mut T,
len : usize
}
impl <T> AddressMap<T> where T: Copy{
pub fn new(start: Address, end: Address) -> AddressMap<T> {
let len = end.diff(start) >> LOG_POINTER_SIZE;
let ptr = unsafe{malloc_zero(mem::size_of::<T>() * len)} as *mut T;
AddressMap{start: start, end: end, ptr: ptr, len: len}
}
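// Note: unlike AddressBitmap, these accessors do no bounds checking; callers
// are presumably expected to stay within [start, end).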
#[inline(always)]
pub fn set(&self, addr: Address, value: T) {
let index = (addr.diff(self.start) >> LOG_POINTER_SIZE) as isize;
unsafe{*self.ptr.offset(index) = value};
}
#[inline(always)]
pub fn get(&self, addr: Address) -> T {
let index = (addr.diff(self.start) >> LOG_POINTER_SIZE) as isize;
unsafe {*self.ptr.offset(index)}
}
}
\ No newline at end of file
use std::mem;
use runtime::mem::heap::gc::malloc_zero;
#[derive(Clone)]
pub struct Bitmap {
bitmap : *mut u64,
bitmap_len : usize
}
impl Bitmap {
pub fn new(length: usize) -> Bitmap {
let bitmap_len = length;
let bitmap = unsafe {
// secretly reserve one more word
malloc_zero(mem::size_of::<u64>() * ((bitmap_len >> 6) + 1)) as *mut u64
};
Bitmap{bitmap: bitmap, bitmap_len: bitmap_len}
}
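// Bits are packed into u64 words: bit `index` lives in word `index >> 6`,
// at position `index & 63` within that word.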
#[inline(always)]
pub fn set_bit(&mut self, index: usize) {
let word = unsafe{self.bitmap.offset((index >> 6) as isize)};
unsafe {*word = *word | (1 << (index & 63))};
}
#[inline(always)]
pub fn clear_bit(&mut self, index: usize) {
let word = unsafe {self.bitmap.offset((index >> 6) as isize)};
unsafe {*word = *word & !(1 << (index & 63))};
}
#[inline(always)]
pub fn test_bit(&self, index: usize) -> bool{
let word = unsafe {self.bitmap.offset((index >> 6) as isize)};
unsafe {(*word & (1 << (index & 63))) != 0}
}
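// Distance (in bits) from `index` to the next set bit; returns 0 if no
// further bit is set before the end of the bitmap.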
#[inline(always)]
pub fn length_until_next_bit(&self, index: usize) -> usize {
let mut len = 1;
while index + len < self.bitmap_len {
if self.test_bit(index + len) {
return len;
} else {
len += 1;
continue;
}
}
return 0;
}
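// Writes `length` bits of `value` starting at `index`, splitting the run
// across two words when it crosses a 64-bit boundary (which is why
// `Bitmap::new` reserves one extra word).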
#[inline(always)]
pub fn set(&mut self, index: usize, value: u64, length: usize) {
if cfg!(debug_assertions) {
assert!(index < self.bitmap_len);
assert!(length <= 64);
}
let nth_u64 = index >> 6;
let nth_bit = index & 63;
let word = unsafe {self.bitmap.offset(nth_u64 as isize)};
if length <= 64 - nth_bit {
unsafe {
*word = *word | (value << nth_bit);
}
} else {
unsafe {
let next_word = self.bitmap.offset(nth_u64 as isize + 1);
*word = *word | (value.wrapping_shl(nth_bit as u32));
*next_word = *next_word | (value >> (64 - nth_bit));
}
}
}
#[inline(always)]
pub fn get(&self, index: usize, length: usize) -> u64 {
if cfg!(debug_assertions) {
assert!(index < self.bitmap_len);
assert!(length <= 64);
}
let nth_u64 = index >> 6;
let nth_bit = index % 64;
let word = unsafe {self.bitmap.offset(nth_u64 as isize)};
if length <= 64 - nth_bit {
((unsafe {*word}) >> nth_bit) & ((1 << length) - 1)
} else {
unsafe {
let next_word = self.bitmap.offset(nth_u64 as isize + 1);
let part1 = *word >> nth_bit;
let part2 = (*next_word & ( (1 << (nth_bit + length - 64)) - 1 )) << (64 - nth_bit);
part1 | part2
}
}
}
pub fn print(&self) {
let mut ptr = self.bitmap;
let nwords = {
if self.bitmap_len / 64 == 0 {
1
} else {
self.bitmap_len / 64
}
};
for i in 0..nwords {
println!("{}\t0b{:64b}", i * 64, unsafe {*ptr});
ptr = unsafe{ptr.offset(1)};
}
}
}
#[cfg(test)]
mod test {
use super::*;
#[test]
fn test_bit() {
let mut bitmap = Bitmap::new(64);
bitmap.set_bit(3);
bitmap.print();
assert!(bitmap.test_bit(3));
bitmap.clear_bit(3);
assert!(!bitmap.test_bit(3));
bitmap.set_bit(3);
bitmap.set_bit(4);
bitmap.set_bit(6);
bitmap.print();
assert_eq!(bitmap.length_until_next_bit(3), 1);
assert_eq!(bitmap.length_until_next_bit(4), 2);
assert_eq!(bitmap.length_until_next_bit(5), 1);
assert_eq!(bitmap.length_until_next_bit(6), 0);
}
}
\ No newline at end of file
use std::cmp;
use std::fmt;
use std::mem;
mod bitmap;
mod address_bitmap;
mod address_map;
pub use self::address_bitmap::AddressBitmap;
pub use self::address_map::AddressMap;
pub const LOG_POINTER_SIZE : usize = 3;
pub const POINTER_SIZE : usize = 1 << LOG_POINTER_SIZE;
#[repr(C)]
#[derive(Copy, Clone, Eq, Hash)]
pub struct Address(usize);
impl Address {
#[inline(always)]
pub fn plus(&self, bytes: usize) -> Self {
Address(self.0 + bytes)
}
#[allow(dead_code)]
#[inline(always)]
pub fn sub(&self, bytes: usize) -> Self {
Address(self.0 - bytes)
}
#[inline(always)]
pub fn offset<T>(&self, offset: isize) -> Self {
Address((self.0 as isize + mem::size_of::<T>() as isize * offset) as usize)
}
#[inline(always)]
pub fn diff(&self, another: Address) -> usize {
debug_assert!(self.0 >= another.0, "for a.diff(b), a needs to be no smaller than b");
self.0 - another.0
}
#[inline(always)]
pub unsafe fn load<T: Copy> (&self) -> T {
*(self.0 as *mut T)
}
#[inline(always)]
pub unsafe fn store<T> (&self, value: T) {
*(self.0 as *mut T) = value;
}
#[inline(always)]
pub fn is_zero(&self) -> bool {
self.0 == 0
}
#[inline(always)]
pub fn align_up(&self, align: usize) -> Address {
Address((self.0 + align - 1) & !(align - 1))
}
pub fn is_aligned_to(&self, align: usize) -> bool {
self.0 % align == 0
}
pub fn memset(&self, char: u8, length: usize) {
let mut cur : *mut u8 = self.0 as *mut u8;
for _ in 0..length {
unsafe {
*cur = char;
cur = cur.offset(1);
}
}
}
#[inline(always)]
pub unsafe fn to_object_reference(&self) -> ObjectReference {
mem::transmute(self.0)
}
#[inline(always)]
pub fn from_ptr<T> (ptr: *const T) -> Address {
unsafe {mem::transmute(ptr)}
}
#[inline(always)]
pub fn to_ptr<T> (&self) -> *const T {
unsafe {mem::transmute(self.0)}
}
#[inline(always)]
pub fn to_ptr_mut<T> (&self) -> *mut T {
unsafe {mem::transmute(self.0)}
}
#[inline(always)]
pub fn as_usize(&self) -> usize {
self.0
}
#[inline(always)]
pub unsafe fn zero() -> Address {
Address(0)
}
}
impl PartialOrd for Address {
#[inline(always)]
fn partial_cmp(&self, other: &Address) -> Option<cmp::Ordering> {
Some(self.0.cmp(& other.0))
}
}
impl PartialEq for Address {
#[inline(always)]
fn eq(&self, other: &Address) -> bool {
self.0 == other.0
}
#[inline(always)]
fn ne(&self, other: &Address) -> bool {
self.0 != other.0
}
}
impl fmt::UpperHex for Address {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{:X}", self.0)
}
}
impl fmt::Display for Address {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "0x{:X}", self.0)
}
}
impl fmt::Debug for Address {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "0x{:X}", self.0)
}
}
#[derive(Copy, Clone, Eq, Hash)]
pub struct ObjectReference (usize);
impl ObjectReference {
#[inline(always)]
pub fn to_address(&self) -> Address {
unsafe {mem::transmute(self.0)}
}
#[inline(always)]
pub fn is_null(&self) -> bool {
self.0 == 0
}
pub fn value(&self) -> usize {
self.0
}
}
impl PartialOrd for ObjectReference {
#[inline(always)]
fn partial_cmp(&self, other: &ObjectReference) -> Option<cmp::Ordering> {
Some(self.0.cmp(& other.0))
}
}
impl PartialEq for ObjectReference {
#[inline(always)]
fn eq(&self, other: &ObjectReference) -> bool {
self.0 == other.0
}
#[inline(always)]
fn ne(&self, other: &ObjectReference) -> bool {
self.0 != other.0
}
}
impl fmt::UpperHex for ObjectReference {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{:X}", self.0)
}
}
impl fmt::Display for ObjectReference {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "0x{:X}", self.0)
}
}
impl fmt::Debug for ObjectReference {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "0x{:X}", self.0)
}
}
#[inline(always)]
pub fn test_nth_bit(value: u8, index: usize) -> bool {
value & (1 << index) != 0
}
#[inline(always)]
pub fn lower_bits(value: u8, len: usize) -> u8 {
value & ((1 << len) - 1)
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
pub fn test_u8_bits() {
let value : u8 = 0b1100_0011;
assert_eq!(test_nth_bit(value, 6), true);
assert_eq!(lower_bits(value, 6), 0b00_0011);
}
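// An illustrative sketch of basic Address arithmetic (relies only on the
// methods defined above).
#[test]
pub fn test_address_arith() {
let a = Address::from_ptr(4096 as *const u64);
assert_eq!(a.plus(8).diff(a), 8);
assert!(a.is_aligned_to(POINTER_SIZE));
assert_eq!(a.plus(1).align_up(8), a.plus(8));
}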
}
\ No newline at end of file
use runtime::mem::common::Address;
use runtime::mem::heap::immix;
extern crate aligned_alloc;
use std::collections::LinkedList;
use std::sync::Arc;
use std::sync::RwLock;
pub struct FreeListSpace {
current_nodes : LinkedList<Box<FreeListNode>>,
node_id: usize,
size : usize,
used_bytes : usize
}
impl FreeListSpace {
pub fn new(size: usize) -> FreeListSpace {
FreeListSpace {
current_nodes: LinkedList::new(),
node_id: 0,
size: size,
used_bytes: 0
}
}
#[allow(unused_variables)]
pub fn mark(&mut self, obj: Address) {
}
pub fn alloc(&mut self, size: usize, align: usize) -> Option<Address> {
if self.used_bytes + size > self.size {
None
} else {
let ret = self::aligned_alloc::aligned_alloc(size, align);
let addr = Address::from_ptr::<()>(ret);
self.current_nodes.push_front(Box::new(FreeListNode{id: self.node_id, start: addr, size: size, mark: NodeMark::FreshAlloc}));
self.node_id += 1;
self.used_bytes += size;
Some(addr)
}
}
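// Sweep policy: nodes marked Live during the trace survive (and are reset to
// PrevLive for the next cycle); anything still PrevLive or FreshAlloc was not
// reached, so its memory is released.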
pub fn sweep(&mut self) {
let (new_nodes, new_used_bytes) = {
let mut ret = LinkedList::new();
let nodes = &mut self.current_nodes;
let mut used_bytes = 0;
while !nodes.is_empty() {
let mut node = nodes.pop_front().unwrap();
match node.mark {
NodeMark::Live => {
node.set_mark(NodeMark::PrevLive);
used_bytes += node.size;
ret.push_back(node);
},
NodeMark::PrevLive | NodeMark::FreshAlloc => {
let ptr = node.start.to_ptr::<()>() as *mut ();
// free the memory
unsafe {self::aligned_alloc::aligned_free(ptr);}
// do not add this node into new linked list
}
}
}
(ret, used_bytes)
};
self.current_nodes = new_nodes;
self.used_bytes = new_used_bytes;
}
pub fn current_nodes(&self) -> &LinkedList<Box<FreeListNode>> {
&self.current_nodes
}
pub fn current_nodes_mut(&mut self) -> &mut LinkedList<Box<FreeListNode>> {
&mut self.current_nodes
}
}
pub struct FreeListNode {
id: usize,
start : Address,
size : usize,
mark : NodeMark
}
impl FreeListNode {
pub fn set_mark(&mut self, mark: NodeMark) {
self.mark = mark;
}
}
#[derive(PartialEq, Eq, Copy, Clone, Debug)]
pub enum NodeMark {
FreshAlloc,
PrevLive,
Live,
}
unsafe impl Sync for NodeMark {}
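// Allocation entry point for large objects: retries until the free-list space
// can satisfy the request, triggering a GC after each failed attempt.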
#[inline(never)]
pub fn alloc_large(size: usize, align: usize, mutator: &mut immix::ImmixMutatorLocal, space: Arc<RwLock<FreeListSpace>>) -> Address {
loop {
mutator.yieldpoint();
let ret_addr = {
let mut lo_space_lock = space.write().unwrap();
lo_space_lock.alloc(size, align)
};
match ret_addr {
Some(addr) => {
return addr;
},
None => {
use runtime::mem::heap::gc;
gc::trigger_gc();
}
}
}
}
use std::fmt;
impl fmt::Display for FreeListSpace {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "FreeListSpace\n").unwrap();
write!(f, "{} used, {} total\n", self.used_bytes, self.size).unwrap();
write!(f, "nodes:\n").unwrap();
for node in self.current_nodes() {
write!(f, " {}\n", node).unwrap();
}
write!(f, "done\n")
}
}
impl fmt::Display for FreeListNode {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "FreeListNode#{}(start={:#X}, size={}, state={:?})", self.id, self.start, self.size, self.mark)
}
}
\ No newline at end of file
#include <inttypes.h>
#include <stdlib.h>
#include <string.h>
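/* Zeroed allocation helper; the Rust side (Bitmap, AddressMap) calls this as
   malloc_zero, presumably declared as an extern in runtime::mem::heap::gc. */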
void* malloc_zero(size_t size) {
void* ret = malloc(size);
memset(ret, 0, size);
return ret;
}
uintptr_t immmix_get_stack_ptr();
uintptr_t immmix_get_stack_ptr() {
uintptr_t rsp;
// get current rsp (this C func frame)
__asm__(
"mov %%rsp, %0 \n"
: "=rm" (rsp)
);
return rsp;
}
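/* Number of general-purpose registers on x86-64; presumably used by the GC
   when scanning register contents for roots. */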
int get_registers_count() {
return 16;
}