Commit 4ef8ab7d authored by Kunshan Wang's avatar Kunshan Wang

Memory allocation and addressing.

parent 2b39bedd
......@@ -421,9 +421,9 @@ class InterpreterThread(val id: Int, microVM: MicroVM, initialStack: Interpreter
case i @ InstExtractElement(vecTy, indTy, opnd, index) => {
val ob = boxOf(opnd).asInstanceOf[BoxVector]
val indb = boxOf(index).asInstanceOf[BoxInt]
val ind = OpHelper.prepareSigned(indb.value, indTy.length)
val ind = OpHelper.prepareUnsigned(indb.value, indTy.length)
if (ind < 0 || ind > vecTy.len) {
if (ind > vecTy.len) {
throw new UvmRuntimeException(ctx + "Index %d out of range. Vector type: %s".format(ind, vecTy))
}
......@@ -437,9 +437,9 @@ class InterpreterThread(val id: Int, microVM: MicroVM, initialStack: Interpreter
val ob = boxOf(opnd).asInstanceOf[BoxVector]
val indb = boxOf(index).asInstanceOf[BoxInt]
val ind = OpHelper.prepareSigned(indb.value, indTy.length)
val ind = OpHelper.prepareUnsigned(indb.value, indTy.length)
if (ind < 0 || ind > vecTy.len) {
if (ind > vecTy.len) {
throw new UvmRuntimeException(ctx + "Index %d out of range. Vector type: %s".format(ind, vecTy))
}
......@@ -467,8 +467,8 @@ class InterpreterThread(val id: Int, microVM: MicroVM, initialStack: Interpreter
val ib = boxOf(i).asInstanceOf[BoxVector]
for (((meb, ieb), ind) <- (mb.values zip ib.values).zipWithIndex) {
val me = OpHelper.prepareSigned(meb.asInstanceOf[BoxInt].value, maskIntLen)
if (0 <= me && me < vecLen) {
val me = OpHelper.prepareUnsigned(meb.asInstanceOf[BoxInt].value, maskIntLen)
if (me < vecLen) {
ieb.copyFrom(vb1.values(me.intValue))
} else if (vecLen <= me && me < vecLen * 2) {
ieb.copyFrom(vb2.values(me.intValue - vecLen))
......@@ -478,6 +478,100 @@ class InterpreterThread(val id: Int, microVM: MicroVM, initialStack: Interpreter
}
continueNormally()
}
// NEW: heap-allocate a scalar object of allocTy via the mutator; the result
// box holds an object reference. OOM is routed through handleOutOfMemory,
// which either branches to the exception clause or escalates.
case i @ InstNew(allocTy, excClause) => {
handleOutOfMemory(excClause) {
val addr = mutator.newScalar(allocTy)
val ib = boxOf(i).asInstanceOf[BoxRef]
ib.objRef = addr
continueNormally()
}
}
// NEWHYBRID: heap-allocate a hybrid whose variable-part length is read from
// the length operand as an UNSIGNED integer of lenTy.length bits.
case i @ InstNewHybrid(allocTy, lenTy, length, excClause) => {
handleOutOfMemory(excClause) {
val lb = boxOf(length).asInstanceOf[BoxInt]
val len = OpHelper.prepareUnsigned(lb.value, lenTy.length)
val addr = mutator.newHybrid(allocTy, len.longValue)
val ib = boxOf(i).asInstanceOf[BoxRef]
ib.objRef = addr
continueNormally()
}
}
// ALLOCA: allocate a scalar on the current stack's memory; the result is an
// internal reference (objRef + offset 0) rather than an object reference.
case i @ InstAlloca(allocTy, excClause) => {
handleOutOfMemory(excClause) {
val addr = mutator.allocaScalar(curStack.stackMemory, allocTy)
val ib = boxOf(i).asInstanceOf[BoxIRef]
ib.objRef = addr
ib.offset = 0L
continueNormally()
}
}
// ALLOCAHYBRID: stack-allocate a hybrid; length handled the same unsigned
// way as NEWHYBRID.
case i @ InstAllocaHybrid(allocTy, lenTy, length, excClause) => {
handleOutOfMemory(excClause) {
val lb = boxOf(length).asInstanceOf[BoxInt]
val len = OpHelper.prepareUnsigned(lb.value, lenTy.length)
val addr = mutator.allocaHybrid(curStack.stackMemory, allocTy, len.longValue)
val ib = boxOf(i).asInstanceOf[BoxIRef]
ib.objRef = addr
ib.offset = 0L
continueNormally()
}
}
// GETIREF: convert a ref into an iref to the same object, offset 0.
case i @ InstGetIRef(referentTy, opnd) => {
val ob = boxOf(opnd).asInstanceOf[BoxRef]
val ib = boxOf(i).asInstanceOf[BoxIRef]
ib.objRef = ob.objRef
ib.offset = 0L
continueNormally()
}
// GETFIELDIREF: derive an iref to struct field `index` by adding the field's
// byte offset (from TypeSizes) to the operand's offset.
case i @ InstGetFieldIRef(referentTy, index, opnd) => {
val ob = boxOf(opnd).asInstanceOf[BoxIRef]
val ib = boxOf(i).asInstanceOf[BoxIRef]
ib.objRef = ob.objRef
ib.offset = ob.offset + TypeSizes.fieldOffsetOf(referentTy, index)
continueNormally()
}
// GETELEMIREF: derive an iref to an array element. Note the index is
// prepared as SIGNED here (unlike the allocation lengths above).
case i @ InstGetElemIRef(referentTy, indTy, opnd, index) => {
val ob = boxOf(opnd).asInstanceOf[BoxIRef]
val indb = boxOf(index).asInstanceOf[BoxInt]
val ind = OpHelper.prepareSigned(indb.value, indTy.length)
val ib = boxOf(i).asInstanceOf[BoxIRef]
ib.objRef = ob.objRef
ib.offset = ob.offset + TypeSizes.elemOffsetOf(referentTy, ind.longValue())
continueNormally()
}
// SHIFTIREF: move an iref by a SIGNED number of referentTy-sized strides.
case i @ InstShiftIRef(referentTy, offTy, opnd, offset) => {
val ob = boxOf(opnd).asInstanceOf[BoxIRef]
val offb = boxOf(offset).asInstanceOf[BoxInt]
val off = OpHelper.prepareSigned(offb.value, offTy.length)
val ib = boxOf(i).asInstanceOf[BoxIRef]
ib.objRef = ob.objRef
ib.offset = ob.offset + TypeSizes.shiftOffsetOf(referentTy, off.longValue())
continueNormally()
}
// GETFIXEDPARTIREF: the fixed part of a hybrid is at the hybrid's own
// address, so the operand iref is copied unchanged.
case i @ InstGetFixedPartIRef(referentTy, opnd) => {
val ob = boxOf(opnd).asInstanceOf[BoxIRef]
val ib = boxOf(i).asInstanceOf[BoxIRef]
ib.objRef = ob.objRef
ib.offset = ob.offset
continueNormally()
}
// GETVARPARTIREF: skip past the fixed part by adding the hybrid's
// variable-part offset (from TypeSizes).
case i @ InstGetVarPartIRef(referentTy, opnd) => {
val ob = boxOf(opnd).asInstanceOf[BoxIRef]
val ib = boxOf(i).asInstanceOf[BoxIRef]
ib.objRef = ob.objRef
ib.offset = ob.offset + TypeSizes.varPartOffsetOf(referentTy)
continueNormally()
}
// Indentation guide: Insert more instructions here.
......@@ -674,4 +768,16 @@ class InterpreterThread(val id: Int, microVM: MicroVM, initialStack: Interpreter
}
}
/**
 * Runs the allocation action `f`, diverting any UvmOutOfMemoryException to
 * the instruction's exception clause.
 *
 * When a clause is present, control transfers to its exceptional basic
 * block; otherwise the OOM is escalated as a UvmRuntimeException with the
 * original exception attached as the cause.
 */
private def handleOutOfMemory(excClause: Option[ExcClause])(f: => Unit): Unit = {
  try f catch {
    case oom: UvmOutOfMemoryException =>
      excClause match {
        case Some(ExcClause(_, excBB)) => branchAndMovePC(excBB, 0L)
        case None => throw new UvmRuntimeException(ctx + "Out of memory and there is no handler.", oom)
      }
  }
}
}
......@@ -7,6 +7,7 @@ import uvm.types._
import uvm.ssavariables._
import uvm.refimpl._
import uvm.refimpl.itpr._
import uvm.refimpl.mem._
import MemoryOrder._
import AtomicRMWOptr._
import uvm.refimpl.mem.TypeSizes.Word
......@@ -768,16 +769,16 @@ class UvmInterpreterSpec extends UvmBundleTesterBase {
testFunc(ca, func, Seq()) { (ca, th, st, wp) =>
val Seq(f1, f12) = ca.dumpKeepalives(st, 0)
f1.vb.asSInt(64) shouldEqual 2
f12.vb.asSInt(64) shouldEqual 7
TrapRebindPassVoid(st)
}
ca.close()
}
"EXTRACTELEMENT and INSERTELEMENT" should "work on vectors" in {
val ca = microVM.newClientAgent()
......@@ -785,15 +786,63 @@ class UvmInterpreterSpec extends UvmBundleTesterBase {
testFunc(ca, func, Seq()) { (ca, th, st, wp) =>
val Seq(ee0, ie0, sv0) = ca.dumpKeepalives(st, 0)
ee0.vb.asFloat shouldEqual 0.0f
ie0.vb.asVec.map(_.asFloat) shouldEqual Seq(0.0f, 6.0f, 2.0f, 3.0f)
sv0.vb.asVec.map(_.asFloat) shouldEqual Seq(0.0f, 2.0f, 4.0f, 6.0f)
TrapRebindPassVoid(st)
}
ca.close()
}
// Smoke test for the four allocation instructions: @allocs performs
// NEW/NEWHYBRID/ALLOCA/ALLOCAHYBRID and then traps with all four results
// kept alive. Reaching the trap without an OOM escalation is the check.
"NEW, NEWHYBRID, ALLOCA, ALLOCAHYBRID" should "work" in {
val ca = microVM.newClientAgent()
val func = ca.putFunction("@allocs")
// Variable-part length for the two hybrid allocations, passed as @i64.
val sz = ca.putInt("@i64", 20)
testFunc(ca, func, Seq(sz)) { (ca, th, st, wp) =>
val Seq(n, nh, a, ah) = ca.dumpKeepalives(st, 0)
// nothing to check at this moment: the allocated addresses are
// implementation-defined, so no value assertions are made yet.
TrapRebindPassVoid(st)
}
ca.close()
}
// Exercises the iref-derivation instructions of @memAddressing and compares
// the dumped keepalive addresses against the offsets computed by TypeSizes.
// Fix: the test name misspelled GETELEMIREF as "GITELEMIREF".
"GETIREF, GETFIELDIREF, GETELEMIREF, SHIFTIREF, GETFIXEDPARTIREF AND GETVARPARTIREF" should "work" in {
// Implicit String -> Type views let the TypeSizes helpers below be called
// with a type's bundle name instead of a resolved Type object.
implicit def typeOf(name: String): Type = microVM.globalBundle.typeNs(name)
implicit def structTypeOf(name: String): TypeStruct = typeOf(name).asInstanceOf[TypeStruct]
implicit def seqTypeOf(name: String): AbstractSeqType = typeOf(name).asInstanceOf[AbstractSeqType]
implicit def hybridTypeOf(name: String): TypeHybrid = typeOf(name).asInstanceOf[TypeHybrid]
val ca = microVM.newClientAgent()
val func = ca.putFunction("@memAddressing")
testFunc(ca, func, Seq()) { (ca, th, st, wp) =>
val Seq(barRef, barIRef, bar3, bazIRef, baz3, baz6, jaRef, jaIRef, jaFix, jaVar) = ca.dumpKeepalives(st, 0)
// GETIREF: same object, zero offset.
barIRef.vb.asIRef shouldEqual (barRef.vb.asRef, 0L)
// GETFIELDIREF: field 3 of @StructBar.
bar3.vb.asIRefAddr shouldEqual (barRef.vb.asRef + TypeSizes.fieldOffsetOf("@StructBar", 3))
// GETELEMIREF to element 3, then SHIFTIREF by 3 more elements (to 6).
baz3.vb.asIRefAddr shouldEqual (bazIRef.vb.asIRefAddr + TypeSizes.elemOffsetOf("@ArrayBaz", 3))
baz6.vb.asIRefAddr shouldEqual (bazIRef.vb.asIRefAddr + TypeSizes.elemOffsetOf("@ArrayBaz", 6))
// GETIREF and GETFIXEDPARTIREF of a hybrid leave the address unchanged;
// GETVARPARTIREF skips past the fixed part.
jaIRef.vb.asIRefAddr shouldEqual (jaRef.vb.asRef)
jaFix.vb.asIRefAddr shouldEqual (jaRef.vb.asRef)
jaVar.vb.asIRefAddr shouldEqual (jaRef.vb.asRef + TypeSizes.varPartOffsetOf("@JavaLikeByteArray"))
TrapRebindPassVoid(st)
}
ca.close()
}
}
\ No newline at end of file
......@@ -473,63 +473,58 @@
COMMINST @uvm.thread_exit
}
// .typedef @refi64 = ref<@i64>
// .typedef @irefi64 = iref<@i64>
// .typedef @weakrefi64 = weakref<@i64>
//
// .typedef @StructBar = struct <
// @i64 @i32 @i16 @i8 @float @double
// @refi64 @irefi64 @weakrefi64
// >
//
// .typedef @refBar = ref<@StructBar>
// .typedef @irefBar = iref<@StructBar>
//
// .typedef @hCharArray = hybrid<@i64 @i8>
//
// .funcsig @allocs_sig = @void (@i64)
// .funcdef @allocs VERSION @allocs_v1 <@allocs_sig> (%sz) {
// %entry:
// %new = NEW <@StructBar>
// %newhybrid = NEWHYBRID <@hCharArray> %sz
// %alloca = ALLOCA <@StructBar>
// %allocahybrid = ALLOCAHYBRID <@hCharArray> %sz
//
// %trap = TRAP <@void> KEEPALIVE (%new %newhybrid %alloca %allocahybrid)
// %exit:
// COMMINST @uvm.thread_exit
// }
//
// .typedef @ArrayBaz = array <@i16 1024>
// .const @THREE <@i64> = 3
//
// .typedef @JavaLikeByteArray = hybrid <@i32 @i8>
//
// .const @I64_1024 <@i64> = 1024
//
// .funcsig @memAddressing_sig = @noparamsnoret
// .funcdef @memAddressing VERSION @memAddressing_v1 <@memAddressing_sig> () {
// %entry:
// %bar_ref = NEW <@StructBar>
// %bar_iref = GETIREF <@StructBar> %bar_ref
// %bar_3 = GETFIELDIREF <@StructBar 3> %bar_iref
//
// %baz_iref = ALLOCA <@ArrayBaz>
// %baz_3 = GETELEMIREF <@ArrayBaz> %baz_iref @THREE
// %baz_6 = SHIFTIREF <@i16> %baz_3 @THREE
//
// %ja_ref = NEWHYBRID <@JavaLikeByteArray> @I64_1024
// %ja_iref = GETIREF <@JavaLikeByteArray> %ja_ref
// %ja_fix = GETFIXEDPARTIREF <@JavaLikeByteArray> %ja_iref
// %ja_var = GETVARPARTIREF <@JavaLikeByteArray> %ja_iref
//
// %trap = TRAP <@void> KEEPALIVE (%bar_ref %bar_iref %bar_3
// %baz_iref %baz_3 %baz_6 %ja_ref %ja_iref %ja_fix %ja_var)
// %exit:
// COMMINST @uvm.thread_exit
//
// }
//
// Reference types over @i64 (strong ref, internal ref, weak ref).
.typedef @refi64 = ref<@i64>
.typedef @irefi64 = iref<@i64>
.typedef @weakrefi64 = weakref<@i64>
// Struct mixing integer, floating-point and reference fields; field 3 (@i8)
// is the one addressed by GETFIELDIREF in @memAddressing.
.typedef @StructBar = struct <
@i64 @i32 @i16 @i8 @float @double
@refi64 @irefi64 @weakrefi64
>
.typedef @refBar = ref<@StructBar>
.typedef @irefBar = iref<@StructBar>
// Hybrid: fixed part @i64, variable part of @i8 elements.
.typedef @hCharArray = hybrid<@i64 @i8>
// @allocs: performs one of each allocation instruction (heap scalar, heap
// hybrid, stack scalar, stack hybrid) with %sz as the hybrid length, then
// traps with all four results kept alive for the Scala test to inspect.
.funcsig @allocs_sig = @void (@i64)
.funcdef @allocs VERSION @allocs_v1 <@allocs_sig> (%sz) {
%entry:
%new = NEW <@StructBar>
%newhybrid = NEWHYBRID <@hCharArray @i64> %sz
%alloca = ALLOCA <@StructBar>
%allocahybrid = ALLOCAHYBRID <@hCharArray @i64> %sz
%trap = TRAP <@void> KEEPALIVE (%new %newhybrid %alloca %allocahybrid)
COMMINST @uvm.thread_exit
}
.typedef @ArrayBaz = array <@i16 1024>
.typedef @JavaLikeByteArray = hybrid <@i32 @i8>
.const @I64_1024 <@i64> = 1024
// @memAddressing: derives irefs into a struct, an array and a hybrid; the
// Scala test compares the trapped addresses against TypeSizes offsets.
// NOTE(review): @I64_3 is used below but not declared in this chunk (the old
// commented-out version used @THREE) — confirm @I64_3 is defined elsewhere
// in the bundle.
.funcdef @memAddressing VERSION @memAddressing_v1 <@noparamsnoret> () {
%entry:
%bar_ref = NEW <@StructBar>
%bar_iref = GETIREF <@StructBar> %bar_ref
%bar_3 = GETFIELDIREF <@StructBar 3> %bar_iref
%baz_iref = ALLOCA <@ArrayBaz>
%baz_3 = GETELEMIREF <@ArrayBaz @i64> %baz_iref @I64_3
%baz_6 = SHIFTIREF <@i16 @i64> %baz_3 @I64_3
%ja_ref = NEWHYBRID <@JavaLikeByteArray @i64> @I64_1024
%ja_iref = GETIREF <@JavaLikeByteArray> %ja_ref
%ja_fix = GETFIXEDPARTIREF <@JavaLikeByteArray> %ja_iref
%ja_var = GETVARPARTIREF <@JavaLikeByteArray> %ja_iref
%trap = TRAP <@void> KEEPALIVE (%bar_ref %bar_iref %bar_3
%baz_iref %baz_3 %baz_6 %ja_ref %ja_iref %ja_fix %ja_var)
COMMINST @uvm.thread_exit
}
// .global @g_i8 <@i8>
// .global @g_i16 <@i16>
// .global @g_i32 <@i32>
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment