Commit d7234e90 authored by Kunshan Wang

Testing memory manager...

Adds a UvmMemSpec suite covering scalar and hybrid allocation, GC triggering and defragmentation. Stack memories are now allocated through a Mutator passed in by the caller, defrag allocation failure raises NoMoreDefragBlockException instead of returning 0, and GC debug logs print addresses in hexadecimal.

parent 25007640
......@@ -43,11 +43,4 @@ class MicroVM(heapSize: Word = MicroVM.DEFAULT_HEAP_SIZE,
def newClientAgent(): ClientAgent = new ClientAgent(this)
def addClientAgent(ca: ClientAgent): Unit = {
clientAgents.add(ca)
}
def removeClientAgent(ca: ClientAgent): Unit = {
clientAgents.remove(ca)
}
}
\ No newline at end of file
......@@ -28,7 +28,7 @@ trait UndefinedFunctionHandler {
class ClientAgent(microVM: MicroVM) {
val handles = new HashSet[Handle]()
microVM.addClientAgent(this)
microVM.clientAgents.add(this)
val mutator = microVM.memoryManager.heap.makeMutator()
......@@ -45,7 +45,7 @@ class ClientAgent(microVM: MicroVM) {
def close(): Unit = {
handles.clear()
mutator.close()
microVM.removeClientAgent(this)
microVM.clientAgents.remove(this)
}
def loadBundle(r: Reader): Unit = {
......
......@@ -24,8 +24,8 @@ class ThreadStackManager(microVM: MicroVM) {
private def makeThreadID(): Int = {val id = nextThreadID; nextThreadID += 1; id}
def newStack(function: Function, args: Seq[ValueBox]): InterpreterStack = {
val stackMemory = microVM.memoryManager.makeStackMemory()
def newStack(function: Function, args: Seq[ValueBox], mutator: Mutator): InterpreterStack = {
val stackMemory = microVM.memoryManager.makeStackMemory(mutator)
val id = makeStackID()
val sta = new InterpreterStack(id, stackMemory, function.versions.head, args)
stackRegistry.put(id, sta)
......
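For context, a minimal sketch (not part of this commit) of how a caller would use the new newStack signature, where the caller now supplies its own Mutator; mainFunc and threadStackManager are illustrative names, not taken from this diff:

// Sketch only: stack memory is now allocated through a caller-supplied
// Mutator rather than the MemoryManager's internal mutator.
val mutator = microVM.memoryManager.makeMutator()
try {
  val stack = threadStackManager.newStack(mainFunc, Seq(), mutator)
  // ... run the stack ...
} finally {
  mutator.close() // the caller owns and closes its mutator
}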
......@@ -26,7 +26,7 @@ object HeaderUtils extends StrictLogging {
}
def setTag(objRef: Word, tag: Word) {
logger.debug(s"Storing tag ${tag} at addr ${TypeSizes.GC_HEADER_OFFSET_TAG}")
logger.debug("Storing tag 0x%x at addr 0x%x".format(tag, objRef + TypeSizes.GC_HEADER_OFFSET_TAG))
MemorySupport.storeLong(objRef + TypeSizes.GC_HEADER_OFFSET_TAG, tag)
}
......
......@@ -15,12 +15,10 @@ class MemoryManager(val heapSize: Word, val globalSize: Word, val stackSize: Wor
val globalMemory = new GlobalMemory(MEMORY_BEGIN + heapSize, globalSize, microVM)
private val internalMutator = heap.makeMutator()
def makeMutator(): Mutator = heap.makeMutator()
def makeStackMemory(): StackMemory = {
val objRef = internalMutator.newHybrid(InternalTypes.BYTE_ARRAY, stackSize)
def makeStackMemory(mutator: Mutator): StackMemory = {
val objRef = mutator.newHybrid(InternalTypes.BYTE_ARRAY, stackSize)
val stackMemory = new StackMemory(objRef, stackSize, microVM)
stackMemory
}
......
......@@ -44,15 +44,15 @@ class RewindableBumpPointerAllocator(val begin: Word, val extend: Word, val micr
var curTopLoc = top
var reachBottom = false
while (!reachBottom) {
logger.debug(s"curTopLoc is ${curTopLoc}")
logger.debug("curTopLoc is 0x%x".format(curTopLoc))
val iRef = MemorySupport.loadLong(curTopLoc)
logger.debug(s"iRef is ${iRef}")
logger.debug("iRef is 0x%x".format(iRef))
if (iRef != 0) {
val hdr = HeaderUtils.getTag(iRef)
val typeID = (hdr & 0xffffffffL).toInt
logger.debug(s"hdr=${hdr}, typeID=${typeID}")
logger.debug("hdr=0x%x, typeID=0x%x".format(hdr, typeID))
val ty = microVM.globalBundle.typeNs(typeID)
logger.debug(s"type=${ty.repr}: ${ty.toString}")
logger.debug("type=%s: %s".format(ty.repr, ty.toString))
MemoryDataScanner.scanField(ty, 0, iRef, handler)
var prevTopLoc: Word = 0L
prevTopLoc = if (ty.isInstanceOf[TypeHybrid]) {
......
......@@ -90,7 +90,7 @@ class LargeObjectSpace(val heap: SimpleImmixHeap,
def markBlockByObjRef(objRef: Word) {
val blockAddr = objRefToBlockAddr(objRef)
logger.debug("marking block addr %d for obj %d...".format(blockAddr, objRef))
logger.debug("marking block addr 0x%x for obj 0x%x...".format(blockAddr, objRef))
markBlock(blockAddr)
}
......@@ -104,17 +104,17 @@ class LargeObjectSpace(val heap: SimpleImmixHeap,
var curBlock = head
val lastBlock = getPrev(curBlock)
var nextBlock = getNext(curBlock)
logger.debug("Begin iteration from %d to %d".format(curBlock, lastBlock))
logger.debug("Begin iteration from 0x%x to 0x%x".format(curBlock, lastBlock))
var finished = false
while (!finished) {
logger.debug("Visiting block %d..".format(curBlock))
logger.debug("Visiting block 0x%x..".format(curBlock))
val mark = getBlockMark(curBlock)
if (mark != MARK_BIT) {
logger.debug("Deallocating block addr %d...".format(curBlock))
logger.debug("Deallocating block addr 0x%x...".format(curBlock))
dealloc(curBlock)
anyDeallocated = true
} else {
logger.debug("Block addr %d contains live object.".format(curBlock))
logger.debug("Block addr 0x%x contains live object.".format(curBlock))
unmarkBlock(curBlock)
}
if (curBlock == lastBlock) {
......
......@@ -94,6 +94,7 @@ object MemoryDataScanner extends StrictLogging {
curAddr = TypeSizes.alignUp(curAddr + varSize, varAlign)
}
}
case _ => // Ignore non-reference fields.
}
}
}
......@@ -122,14 +122,14 @@ class SimpleImmixCollector(val heap: SimpleImmixHeap,
private def maybeMarkAndStat(addr: Word): Boolean = {
assert(addr != 0L, "addr should be non-zero before calling this function")
val oldHeader = HeaderUtils.getTag(addr)
logger.debug("GC header of %d is %x".format(addr, oldHeader))
logger.debug("GC header of 0x%x is 0x%x".format(addr, oldHeader))
val wasMarked = (oldHeader & MARK_MASK) != 0
if (!wasMarked) {
val newHeader = oldHeader | MARK_MASK
HeaderUtils.setTag(addr, newHeader)
logger.debug("Newly marked %d".format(addr))
logger.debug("Newly marked 0x%x".format(addr))
if (space.isInSpace(addr)) {
space.markBlockByObjRef(addr)
//space.markBlockByObjRef(addr)
val tag = HeaderUtils.getTag(addr)
val ty = HeaderUtils.getType(microVM, tag)
val used = ty match {
......@@ -144,7 +144,7 @@ class SimpleImmixCollector(val heap: SimpleImmixHeap,
val blockNum = space.objRefToBlockIndex(addr)
space.incStat(blockNum, used)
} else if (los.isInSpace(addr)) {
los.markBlockByObjRef(addr)
//los.markBlockByObjRef(addr)
} else {
throw new UvmRefImplException("Object ref %d not in any space".format(addr))
}
......@@ -179,7 +179,7 @@ class SimpleImmixCollector(val heap: SimpleImmixHeap,
private def maybeMove(toObj: Word, updateFunc: Word => Unit): Boolean = {
val oldHeader = HeaderUtils.getTag(toObj)
logger.debug("GC header of %d is %x".format(toObj, oldHeader))
logger.debug("GC header of 0x%x is 0x%x".format(toObj, oldHeader))
val markBit = oldHeader & MARK_MASK
val moveBit = oldHeader & MOVE_MASK
val wasMarked = markBit != 0
......@@ -214,14 +214,14 @@ class SimpleImmixCollector(val heap: SimpleImmixHeap,
val newHeader = oldHeader | MARK_MASK
HeaderUtils.setTag(actualObj, newHeader)
logger.debug(s"Newly marked ${actualObj}")
logger.debug("Newly marked 0x%x".format(actualObj))
if (space.isInSpace(actualObj)) {
space.markBlockByObjRef(actualObj)
} else if (los.isInSpace(actualObj)) {
los.markBlockByObjRef(actualObj)
} else {
throw new UvmRefImplException("Object ref %d not in any space".format(actualObj))
throw new UvmRefImplException("Object ref %x not in any space".format(actualObj))
}
true
}
......@@ -233,40 +233,42 @@ class SimpleImmixCollector(val heap: SimpleImmixHeap,
* old location when not moved.
*/
private def evacuate(oldObjRef: Word): Word = {
logger.debug("Evacuating object %d".format(oldObjRef))
logger.debug("Evacuating object 0x%x".format(oldObjRef))
if (!canDefrag) {
logger.debug("No more reserved blocks.")
oldObjRef
} else {
val tag = HeaderUtils.getTag(oldObjRef)
val ty = HeaderUtils.getType(microVM, tag)
val (newObjRef, oldSize): (Long, Long) = ty match {
case htype: TypeHybrid => {
val len = HeaderUtils.getVarLength(oldObjRef)
val nor = defragMutator.newHybrid(htype, len)
val os = TypeSizes.hybridSizeOf(htype, len)
(nor, os)
}
case _ => {
val nor = defragMutator.newScalar(ty)
val os = TypeSizes.sizeOf(ty)
(nor, os)
try {
val (newObjRef, oldSize): (Long, Long) = ty match {
case htype: TypeHybrid => {
val len = HeaderUtils.getVarLength(oldObjRef)
val nor = defragMutator.newHybrid(htype, len)
val os = TypeSizes.hybridSizeOf(htype, len)
(nor, os)
}
case _ => {
val nor = defragMutator.newScalar(ty)
val os = TypeSizes.sizeOf(ty)
(nor, os)
}
}
}
if (newObjRef == 0) {
canDefrag = false
logger.debug("No more reserved blocks and thus no more moving.")
oldObjRef
} else {
val alignedOldSize = TypeSizes.alignUp(oldSize, TypeSizes.WORD_SIZE_BYTES)
logger.debug("Copying old object %d to %d, %d bytes (aligned up to %d bytes).".format(
logger.debug("Copying old object 0x%x to 0x%x, %d bytes (aligned up to %d bytes).".format(
oldObjRef, newObjRef, oldSize, alignedOldSize))
MemUtils.memcpy(oldObjRef, newObjRef, alignedOldSize)
val newTag = newObjRef | MOVE_MASK
HeaderUtils.setTag(oldObjRef, newTag)
newObjRef
} catch {
case e: NoMoreDefragBlockException =>
canDefrag = false
logger.debug("No more reserved blocks and thus no more moving.")
oldObjRef
}
}
}
......@@ -293,7 +295,7 @@ class SimpleImmixCollector(val heap: SimpleImmixHeap,
private def clearMark(objRef: Long): Boolean = {
val oldHeader = HeaderUtils.getTag(objRef)
logger.debug("GC header of %d is %x".format(objRef, oldHeader))
logger.debug("GC header of 0x%x is 0x%x".format(objRef, oldHeader))
val markBit = oldHeader & MARK_MASK
if (markBit != 0) {
val newHeader = oldHeader & ~(MARK_MASK | MOVE_MASK)
......
......@@ -26,8 +26,8 @@ class SimpleImmixDefragMutator(val heap: SimpleImmixHeap, val space: SimpleImmix
getNewBlock()
private def getNewBlock() {
val newAddr = space.getDefragBlock(curBlockAddr)
newAddr match {
curBlockAddr = space.getDefragBlock(curBlockAddr)
curBlockAddr match {
case Some(addr) =>
cursor = addr
limit = addr + SimpleImmixSpace.BLOCK_SIZE
......@@ -37,9 +37,9 @@ class SimpleImmixDefragMutator(val heap: SimpleImmixHeap, val space: SimpleImmix
override def alloc(size: Word, align: Word, headerSize: Word): Word = {
logger.debug(s"alloc(${size}, ${align}, ${headerSize})")
if (curBlockAddr == 0) {
if (curBlockAddr == None) {
logger.debug("No more reserved blocks. Cannot defragment.")
return 0
throw new NoMoreDefragBlockException("No more blocks for defrag.")
}
val actualAlign = if (align < TypeSizes.WORD_SIZE_BYTES) TypeSizes.WORD_SIZE_BYTES else align
tryTwice {
......@@ -67,6 +67,7 @@ class SimpleImmixDefragMutator(val heap: SimpleImmixHeap, val space: SimpleImmix
}
def close() {
logger.debug("Closing defrag mutator...")
curBlockAddr.foreach(space.returnBlock)
}
}
......@@ -7,16 +7,16 @@ import TypeSizes._
class SimpleImmixHeap(val begin: Word, val size: Word, val microVM: MicroVM) extends Heap {
val mid = begin + size / 2
val space: SimpleImmixSpace = new SimpleImmixSpace(this, "SimpleImmixSpace", begin, size / 2)
val los: LargeObjectSpace = new LargeObjectSpace(this, "Large object space", mid, size / 2)
val collector: SimpleImmixCollector = new SimpleImmixCollector(this, space, los, microVM)
val collectorThread: Thread = new Thread(collector)
val los: LargeObjectSpace = new LargeObjectSpace(this, "Large object space", mid, size / 2)
val mid = begin + size / 2
collectorThread.setDaemon(true)
collectorThread.start()
......
......@@ -62,6 +62,7 @@ class SimpleImmixMutator(val heap: SimpleImmixHeap, val space: SimpleImmixSpace,
}
def close() {
logger.debug("Closing mutator...")
curBlockAddr.foreach(space.returnBlock)
}
}
......@@ -24,8 +24,19 @@ class SimpleImmixSpace(val heap: SimpleImmixHeap, name: String, begin: Word, ext
import SimpleImmixSpace._
if (begin % BLOCK_SIZE != 0) {
throw new UvmRefImplException("space should be aligned to BLOCK_SIZE " + BLOCK_SIZE)
}
if (extend % BLOCK_SIZE != 0) {
throw new UvmRefImplException("space size should be a multiple of BLOCK_SIZE " + BLOCK_SIZE)
}
val nBlocks: Int = (extend / BLOCK_SIZE).toInt
/** The number of reserved blocks (for defrag). */
private val nReserved: Int = Math.max(nBlocks / 20, 1) // reserve at least one block
/** Flag for each block */
private val blockFlags: Array[Int] = new Array[Int](nBlocks)
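The Math.max change matters for small heaps; a hedged arithmetic sketch, with the block size chosen purely for illustration:

// Illustrative numbers only: suppose a 256 KiB small-object space split
// into 32 KiB blocks.
val nBlocks = 8                               // 256 KiB / 32 KiB
val reservedBefore = nBlocks / 20             // = 0, so defrag could never run
val reservedAfter = Math.max(nBlocks / 20, 1) // = 1, one block is always kept for defrag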
......@@ -41,14 +52,11 @@ class SimpleImmixSpace(val heap: SimpleImmixHeap, name: String, begin: Word, ext
/** For each block, count how many bytes are occupied. */
private val blockUsedStats: Array[Word] = new Array[Word](nBlocks)
/** The number of reserved blocks (for defrag). */
private val nReserved: Int = nBlocks / 20
/** A list of free blocks reserved for defrag. */
private val defragResv: Array[Int] = new Array[Int](nReserved)
/** The number of free blocks (valid entries) in defragResv. */
private var defragResvFree: Int = _
private var defragResvFree: Int = nReserved
/** The index of the next free reserved block to allocate in defragResv. */
private var nextResv: Int = 0
......@@ -56,14 +64,6 @@ class SimpleImmixSpace(val heap: SimpleImmixHeap, name: String, begin: Word, ext
/** A list of buckets, for statistics. Used by defrag. */
private val buckets: Array[Word] = new Array[Word](N_BUCKETS)
if (begin % BLOCK_SIZE != 0) {
throw new UvmRefImplException("space should be aligned to BLOCK_SIZE " + BLOCK_SIZE)
}
if (extend % BLOCK_SIZE != 0) {
throw new UvmRefImplException("space size should be a multiple of BLOCK_SIZE " + BLOCK_SIZE)
}
for (i <- 0 until nReserved) { // Block 0 to nReserved-1 are reserved
defragResv(i) = i
reserve(i) // Set the reserved flag
......@@ -163,19 +163,7 @@ class SimpleImmixSpace(val heap: SimpleImmixHeap, name: String, begin: Word, ext
defragResvFree = newDefragResvFree
freeListValidCount = newNFree
if (logger.underlying.isDebugEnabled()) {
val sb1 = new StringBuilder("New reserved freelist:")
for (i <- 0 until defragResvFree) {
sb1.append(" ").append(defragResv(i))
}
logger.debug(sb1.toString)
val sb2 = new StringBuilder("New freelist:")
for (i <- 0 until freeListValidCount) {
sb2.append(" ").append(freeList(i))
}
logger.debug(sb2.toString)
for (i <- 0 until nBlocks) {
logger.debug(s"blockFlags[${i}] = ${blockFlags(i)}")
}
debugLogBlockStates()
}
nextResv = 0
nextFree = 0
......@@ -185,6 +173,7 @@ class SimpleImmixSpace(val heap: SimpleImmixHeap, name: String, begin: Word, ext
def returnBlock(blockAddr: Word) {
val blockNum = blockAddrToBlockIndex(blockAddr)
unreserve(blockNum)
logger.debug("Block %d returned to space.".format(blockNum))
}
// Statistics
......@@ -253,14 +242,31 @@ class SimpleImmixSpace(val heap: SimpleImmixHeap, name: String, begin: Word, ext
if (myCursor >= defragResvFree) {
return None
}
nextResv += 1
val blockNum = defragResv(myCursor)
val blockAddr = blockIndexToBlockAddr(blockNum)
MemUtils.zeroRegion(blockAddr, BLOCK_SIZE)
return Some(blockAddr)
}
// Debugging
def debugLogBlockStates() {
val sb1 = new StringBuilder("Reserved freelist:")
for (i <- 0 until defragResvFree) {
sb1.append(" ").append(defragResv(i))
}
logger.debug(sb1.toString)
val sb2 = new StringBuilder("Freelist:")
for (i <- 0 until freeListValidCount) {
sb2.append(" ").append(freeList(i))
}
logger.debug(sb2.toString)
for (i <- 0 until nBlocks) {
logger.debug(s"blockFlags[${i}] = ${blockFlags(i)}")
}
}
}
package uvm.refimpl.mem.simpleimmix
import uvm.refimpl.UvmOutOfMemoryException
class NoMoreDefragBlockException(message: String = null, cause: Throwable = null) extends UvmOutOfMemoryException(message, cause)
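Because NoMoreDefragBlockException extends UvmOutOfMemoryException, a generic out-of-memory handler still catches it; a minimal sketch, with handleOutOfMemory, size, align and headerSize as illustrative placeholders:

// Sketch only: defrag allocation failure now surfaces as an exception
// instead of a 0 return value, but remains an UvmOutOfMemoryException.
try {
  defragMutator.alloc(size, align, headerSize)
} catch {
  case e: UvmOutOfMemoryException =>
    handleOutOfMemory(e) // also covers NoMoreDefragBlockException
}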
package uvm.refimpl.mem
import org.scalatest._
import uvm.refimpl._
import java.io.FileReader
class UvmMemSpec extends FlatSpec with Matchers with BeforeAndAfter {
// The heap size is intentionally reduced to trigger GC more often
val microVM = new MicroVM(heapSize = 512L * 1024L);
{
microVM.memoryManager.heap.space.debugLogBlockStates()
val ca = microVM.newClientAgent()
val r = new FileReader("tests/uvm-refimpl-test/uvm-mem-test-bundle.uir")
ca.loadBundle(r)
r.close()
ca.close()
microVM.memoryManager.heap.space.debugLogBlockStates()
}
behavior of "UVM memory manager"
it should "allocate scalar objects" in {
val ca = microVM.newClientAgent()
val h = ca.newFixed(ca.idOf("@i64"))
val h2 = ca.newFixed(ca.idOf("@a0"))
ca.close()
microVM.memoryManager.heap.space.debugLogBlockStates()
}
it should "allocate hybrid objects" in {
val ca = microVM.newClientAgent()
val hlen = ca.putInt(ca.idOf("@i64"), 1024)
val h = ca.newHybrid(ca.idOf("@h0"), hlen)
val hlen2 = ca.putInt(ca.idOf("@i64"), 128*1024)
val h2 = ca.newHybrid(ca.idOf("@h0"), hlen2)
ca.close()
}
it should "automatically trigger GC if the memory is full" in {
val ca = microVM.newClientAgent()
val hheld = ca.newFixed(ca.idOf("@i64")) // Objects held in the CA should survive the GC
val hlen = ca.putInt(ca.idOf("@i64"), 1024)
val allocCount = 300 // enough to fill the 256KiB small object space at least once.
for (i <- (0 until allocCount)) {
val h = ca.newHybrid(ca.idOf("@h0"), hlen)
ca.deleteHandle(h)
}
ca.close()
}
it should "defrag heavily fragmented heap" in {
val ca = microVM.newClientAgent()
val hlen = ca.putInt(ca.idOf("@i64"), 1024)
val allocCount = 300 // enough to fill the 256KiB small object space at least once.
for (i <- (0 until allocCount)) {
val h = ca.newHybrid(ca.idOf("@h0"), hlen)
ca.deleteHandle(h)
val hBreadcrumb = ca.newFixed(ca.idOf("@i64"))
}
ca.close()
}}
\ No newline at end of file
.typedef @i1 = int<1>
.typedef @i8 = int<8>
.typedef @i16 = int<16>
.typedef @i32 = int<32>
.typedef @i64 = int<64>
.typedef @float = float
.typedef @double = double
.typedef @rv = ref<@void>
.typedef @irv = iref<@void>
.typedef @wrv = weakref<@void>
.typedef @ri16 = ref<@i16>
.typedef @s1 = struct<@i8 @i16 @i32 @i64 @float @double @rv @irv @wrv @ri16>
.typedef @Cons = struct<@i64 @RefCons>
.typedef @RefCons = ref<@Cons>
.typedef @foo = struct<@double @i64>
.typedef @a0 = array<@i8 100>
.typedef @a1 = array<@foo 10>
.typedef @a2 = array<@a1 10>
.typedef @h0 = hybrid <@void @i8>
.typedef @h1 = hybrid <@foo @i64>
.typedef @void = void
.funcsig @sig0 = @void ()
.typedef @ii8 = iref<@i8>
.typedef @iii8 = iref<@ii8>
.funcsig @sig1 = @i32 (@i32 @iii8)
.typedef @f0 = func <@sig0>
.typedef @f1 = func <@sig1>
.typedef @th = thread
.typedef @st = stack
.typedef @tr64 = tagref64
.typedef @4xfloat = vector <@float 4>
.typedef @4xi32 = vector <@i32 4>
.typedef @2xdouble = vector <@double 2>
\ No newline at end of file