
Commit b2e215ec authored by Kunshan Wang

Memory operations via pointers.

parent 31a531fe
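
For orientation, the sketch below shows how a client might drive the pointer-based memory API this commit adds to ClientAgent (putPointer/toPointer plus pointer-aware load and store). It is an illustration only, not part of the diff: the bundle names (@i64, @ptri64) mirror those used in the tests further down, and the NOT_ATOMIC memory order and the exact handle-creation overloads are assumptions.

// Illustration only -- not part of this commit.
// Assumes a loaded bundle defining @i64 and @ptri64 (ptr<@i64>),
// and that MemoryOrder.NOT_ATOMIC is available for plain loads/stores.
val ca = microVM.newClientAgent()

// Back the pointer with memory allocated outside the Mu heap.
val ms = new MemorySupport(1024L)
val addr: Word = ms.muMemoryBegin

val loc    = ca.putPointer("@ptri64", addr)   // handle of type ptr<@i64>
val newVal = ca.putInt("@i64", 42)

ca.store(MemoryOrder.NOT_ATOMIC, loc, newVal)       // store through the raw pointer
val loaded = ca.load(MemoryOrder.NOT_ATOMIC, loc)   // load it back the same way

assert(ca.toPointer(loc) == addr)   // toPointer recovers the raw address from the handle

ca.close()
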
@@ -95,6 +95,11 @@ class ClientAgent(microVM: MicroVM) {
newHandle(t, BoxVector(vs.map(BoxDouble)))
}
def putPointer(typeID: Int, v: Word): Handle = {
val t = microVM.globalBundle.typeNs(typeID)
newHandle(t, BoxPointer(v))
}
def putConstant(id: Int): Handle = {
val c = microVM.globalBundle.constantNs(id)
val t = c.constTy
@@ -153,6 +158,10 @@ class ClientAgent(microVM: MicroVM) {
h.vb.asInstanceOf[BoxVector].values.map(b => b.asInstanceOf[BoxDouble].value)
}
def toPointer(h: Handle): Word = {
h.vb.asInstanceOf[BoxPointer].addr
}
def extractValue(str: Handle, index: Int): Handle = {
val st = str.ty.asInstanceOf[TypeStruct]
val sb = str.vb.asInstanceOf[BoxStruct]
@@ -251,48 +260,56 @@ class ClientAgent(microVM: MicroVM) {
}
def load(ord: MemoryOrder, loc: Handle): Handle = {
val ty = loc.ty.asInstanceOf[TypeIRef].ty
val (ptr, ty) = loc.ty match {
case TypeIRef(t) => (false, t)
case TypePtr(t) => (true, t)
}
val uty = InternalTypePool.unmarkedOf(ty)
val b = loc.vb.asInstanceOf[BoxIRef]
val iRef = b.objRef + b.offset
val addr = MemoryOperations.addressOf(ptr, loc.vb)
val nb = ValueBox.makeBoxForType(uty)
MemoryOperations.load(uty, iRef, nb)
MemoryOperations.load(ptr, uty, addr, nb)
newHandle(uty, nb)
}
def store(ord: MemoryOrder, loc: Handle, newVal: Handle): Unit = {
val ty = loc.ty.asInstanceOf[TypeIRef].ty
val (ptr, ty) = loc.ty match {
case TypeIRef(t) => (false, t)
case TypePtr(t) => (true, t)
}
val uty = InternalTypePool.unmarkedOf(ty)
val lb = loc.vb.asInstanceOf[BoxIRef]
val iRef = lb.objRef + lb.offset
val addr = MemoryOperations.addressOf(ptr, loc.vb)
val nvb = newVal.vb
val nb = ValueBox.makeBoxForType(uty)
MemoryOperations.store(uty, iRef, nvb, nb)
MemoryOperations.store(ptr, uty, addr, nvb, nb)
}
def cmpXchg(ordSucc: MemoryOrder, ordFail: MemoryOrder, weak: Boolean, loc: Handle, expected: Handle, desired: Handle): (Boolean, Handle) = {
val ty = loc.ty.asInstanceOf[TypeIRef].ty
val (ptr, ty) = loc.ty match {
case TypeIRef(t) => (false, t)
case TypePtr(t) => (true, t)
}
val uty = InternalTypePool.unmarkedOf(ty)
val lb = loc.vb.asInstanceOf[BoxIRef]
val iRef = lb.objRef + lb.offset
val addr = MemoryOperations.addressOf(ptr, loc.vb)
val eb = expected.vb
val db = desired.vb
val br = ValueBox.makeBoxForType(uty)
val succ = MemoryOperations.cmpXchg(uty, iRef, eb, db, br)
val succ = MemoryOperations.cmpXchg(ptr, uty, addr, eb, db, br)
(succ, newHandle(uty, br))
}
def atomicRMW(ord: MemoryOrder, op: AtomicRMWOptr, loc: Handle, opnd: Handle): Handle = {
val ty = loc.ty.asInstanceOf[TypeIRef].ty
val (ptr, ty) = loc.ty match {
case TypeIRef(t) => (false, t)
case TypePtr(t) => (true, t)
}
val uty = InternalTypePool.unmarkedOf(ty)
val lb = loc.vb.asInstanceOf[BoxIRef]
val iRef = lb.objRef + lb.offset
val addr = MemoryOperations.addressOf(ptr, loc.vb)
val ob = opnd.vb
val br = ValueBox.makeBoxForType(uty)
MemoryOperations.atomicRMW(uty, op, iRef, ob, br)
MemoryOperations.atomicRMW(ptr, uty, op, addr, ob, br)
newHandle(uty, br)
}
@@ -50,7 +50,9 @@ object InternalTypePool {
val refOf = LazyPool(TypeRef)
val irefOf = LazyPool(TypeIRef)
val ptrOf = LazyPool(TypePtr)
val funcOf = LazyPool(TypeFunc)
val funcPtrOf = LazyPool(TypeFuncPtr)
val vecOf = new LazyPool[(Type, Long), TypeVector]({ case (t, l) => TypeVector(t, l) })
def unmarkedOf(t: Type): Type = t match {
case TypeWeakRef(r) => refOf(r)
@@ -61,6 +63,11 @@
object TypeInferer {
import InternalTypes._
import InternalTypePool._
def ptrOrIRefOf(ptr: Boolean, ty: Type): Type = {
if (ptr) ptrOf(ty) else irefOf(ty)
}
def inferType(v: SSAVariable): Type = v match {
case c: Constant => c.constTy
case g: GlobalCell => irefOf(g.cellTy)
@@ -93,11 +100,11 @@ object TypeInferer {
case i: InstAlloca => irefOf(i.allocTy)
case i: InstAllocaHybrid => irefOf(i.allocTy)
case i: InstGetIRef => irefOf(i.referentTy)
case i: InstGetFieldIRef => irefOf(i.referentTy.fieldTy(i.index))
case i: InstGetElemIRef => irefOf(i.referentTy.elemTy)
case i: InstShiftIRef => irefOf(i.referentTy)
case i: InstGetFixedPartIRef => irefOf(i.referentTy.fixedTy)
case i: InstGetVarPartIRef => irefOf(i.referentTy.varTy)
case i: InstGetFieldIRef => ptrOrIRefOf(i.ptr, i.referentTy.fieldTy(i.index))
case i: InstGetElemIRef => ptrOrIRefOf(i.ptr, i.referentTy.elemTy)
case i: InstShiftIRef => ptrOrIRefOf(i.ptr, i.referentTy)
case i: InstGetFixedPartIRef => ptrOrIRefOf(i.ptr, i.referentTy.fixedTy)
case i: InstGetVarPartIRef => ptrOrIRefOf(i.ptr, i.referentTy.varTy)
case i: InstLoad => unmarkedOf(i.referentTy)
case i: InstStore => VOID
case i: InstCmpXchg => unmarkedOf(i.referentTy)
@@ -358,6 +358,30 @@ class InterpreterThread(val id: Int, implicit private val microVM: MicroVM, init
"REFCAST can only convert between two types both of which are ref, iref, or func. Found %s and %s.".format(scalarFromTy, scalarToTy))
}
def ptrcast(): Unit = {
(scalarFromTy, scalarToTy) match {
case (TypeInt(_), TypeInt(_)) => throw new UvmRuntimeException(ctx +
"PTRCAST cannot convert between two int types. Found %s and %s.".format(scalarFromTy, scalarToTy))
case _ =>
}
val srcAddr: Word = scalarFromTy match {
case TypeInt(n) => {
val od = bOpnd.asInstanceOf[BoxInt].value
val truncExt = if (n >= 64) OpHelper.trunc(od, 64) else OpHelper.zext(od, n, 64)
truncExt.toLong
}
case TypePtr(_) | TypeFuncPtr(_) => bOpnd.asInstanceOf[BoxPointer].addr
}
scalarToTy match {
case TypeInt(n) => {
val bi = BigInt(srcAddr)
val truncExt = if (n > 64) OpHelper.zext(bi, 64, n) else OpHelper.trunc(bi, n)
br.asInstanceOf[BoxInt].value = truncExt
}
case TypePtr(_) | TypeFuncPtr(_) => br.asInstanceOf[BoxPointer].addr = srcAddr
}
}
op match {
case ConvOptr.TRUNC => iToI()
case ConvOptr.ZEXT => iToI()
@@ -378,6 +402,7 @@ class InterpreterThread(val id: Int, implicit private val microVM: MicroVM, init
case ConvOptr.SITOFP => iToFP(signed = true)
case ConvOptr.BITCAST => bitcast()
case ConvOptr.REFCAST => refcast()
case ConvOptr.PTRCAST => ptrcast()
}
}
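
As a side note on the new ptrcast() case above: int-to-pointer casts zero-extend values narrower than the 64-bit word and truncate wider ones, and pointer-to-int casts apply the same truncate-or-zero-extend rule in the other direction. The self-contained sketch below models that rule with plain BigInt arithmetic instead of the interpreter's OpHelper and value boxes; it is illustrative only.

// Simplified, standalone model of the PTRCAST rule above (illustration only).
// Addresses are 64-bit words; int<n> values are modelled as unsigned BigInts.
def intToPtr(value: BigInt, n: Int): Long =
  if (n >= 64) (value & ((BigInt(1) << 64) - 1)).toLong   // truncate to the word size
  else (value & ((BigInt(1) << n) - 1)).toLong            // zero-extend from n bits

def ptrToInt(addr: Long, n: Int): BigInt = {
  val word = BigInt(addr) & ((BigInt(1) << 64) - 1)       // reinterpret the address as unsigned
  if (n > 64) word else word & ((BigInt(1) << n) - 1)     // widen by zero-extension or truncate
}

// Round-tripping a 64-bit address through @i64 is the identity,
// matching the ptrcast expectations in the tests below.
assert(intToPtr(ptrToInt(0x123456789abcdef0L, 64), 64) == 0x123456789abcdef0L)
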
@@ -634,105 +659,94 @@ class InterpreterThread(val id: Int, implicit private val microVM: MicroVM, init
}
case i @ InstGetFieldIRef(ptr, referentTy, index, opnd) => {
val ob = boxOf(opnd).asInstanceOf[BoxIRef]
val ib = boxOf(i).asInstanceOf[BoxIRef]
ib.objRef = ob.objRef
ib.offset = ob.offset + TypeSizes.fieldOffsetOf(referentTy, index)
val addrIncr = TypeSizes.fieldOffsetOf(referentTy, index)
incrementBoxIRefOrPointer(ptr, opnd, i, addrIncr)
continueNormally()
}
case i @ InstGetElemIRef(ptr, referentTy, indTy, opnd, index) => {
val ob = boxOf(opnd).asInstanceOf[BoxIRef]
val indb = boxOf(index).asInstanceOf[BoxInt]
val ind = OpHelper.prepareSigned(indb.value, indTy.length)
val ib = boxOf(i).asInstanceOf[BoxIRef]
ib.objRef = ob.objRef
ib.offset = ob.offset + TypeSizes.elemOffsetOf(referentTy, ind.longValue())
val addrIncr = TypeSizes.elemOffsetOf(referentTy, ind.longValue())
incrementBoxIRefOrPointer(ptr, opnd, i, addrIncr)
continueNormally()
}
case i @ InstShiftIRef(ptr, referentTy, offTy, opnd, offset) => {
val ob = boxOf(opnd).asInstanceOf[BoxIRef]
val offb = boxOf(offset).asInstanceOf[BoxInt]
val off = OpHelper.prepareSigned(offb.value, offTy.length)
val ib = boxOf(i).asInstanceOf[BoxIRef]
ib.objRef = ob.objRef
ib.offset = ob.offset + TypeSizes.shiftOffsetOf(referentTy, off.longValue())
val addrIncr = TypeSizes.shiftOffsetOf(referentTy, off.longValue())
incrementBoxIRefOrPointer(ptr, opnd, i, addrIncr)
continueNormally()
}
case i @ InstGetFixedPartIRef(ptr, referentTy, opnd) => {
val ob = boxOf(opnd).asInstanceOf[BoxIRef]
val ib = boxOf(i).asInstanceOf[BoxIRef]
ib.objRef = ob.objRef
ib.offset = ob.offset
incrementBoxIRefOrPointer(ptr, opnd, i, 0L)
continueNormally()
}
case i @ InstGetVarPartIRef(ptr, referentTy, opnd) => {
val ob = boxOf(opnd).asInstanceOf[BoxIRef]
val ib = boxOf(i).asInstanceOf[BoxIRef]
ib.objRef = ob.objRef
ib.offset = ob.offset + TypeSizes.varPartOffsetOf(referentTy)
val addrIncr = TypeSizes.varPartOffsetOf(referentTy)
incrementBoxIRefOrPointer(ptr, opnd, i, addrIncr)
continueNormally()
}
case i @ InstLoad(ptr, ord, referentTy, loc, excClause) => {
val uty = InternalTypePool.unmarkedOf(referentTy)
val lb = boxOf(loc).asInstanceOf[BoxIRef]
val ib = boxOf(i)
val la = lb.objRef + lb.offset
if (la == 0L) {
val addr = addressOf(ptr, loc)
if (addr == 0L) {
nullRefError(excClause)
} else {
MemoryOperations.load(uty, la, ib)
MemoryOperations.load(ptr, uty, addr, ib)
continueNormally()
}
}
case i @ InstStore(ptr, ord, referentTy, loc, newVal, excClause) => {
val uty = InternalTypePool.unmarkedOf(referentTy)
val lb = boxOf(loc).asInstanceOf[BoxIRef]
val nvb = boxOf(newVal)
val ib = boxOf(i)
val la = lb.objRef + lb.offset
if (la == 0L) {
val addr = addressOf(ptr, loc)
if (addr == 0L) {
nullRefError(excClause)
} else {
MemoryOperations.store(uty, la, nvb, ib)
MemoryOperations.store(ptr, uty, addr, nvb, ib)
continueNormally()
}
}
case i @ InstCmpXchg(ptr, weak, ordSucc, ordFail, referentTy, loc, expected, desired, excClause) => {
val uty = InternalTypePool.unmarkedOf(referentTy)
val lb = boxOf(loc).asInstanceOf[BoxIRef]
val eb = boxOf(expected)
val db = boxOf(desired)
val ib = boxOf(i)
val la = lb.objRef + lb.offset
if (la == 0L) {
val addr = addressOf(ptr, loc)
if (addr == 0L) {
nullRefError(excClause)
} else {
MemoryOperations.cmpXchg(uty, la, eb, db, ib)
MemoryOperations.cmpXchg(ptr, uty, addr, eb, db, ib)
continueNormally()
}
}
case i @ InstAtomicRMW(ptr, ord, op, referentTy, loc, opnd, excClause) => {
val uty = InternalTypePool.unmarkedOf(referentTy)
val lb = boxOf(loc).asInstanceOf[BoxIRef]
val ob = boxOf(opnd)
val ib = boxOf(i)
val la = lb.objRef + lb.offset
if (la == 0L) {
val addr = addressOf(ptr, loc)
if (addr == 0L) {
nullRefError(excClause)
} else {
MemoryOperations.atomicRMW(uty, op, la, ob, ib)
MemoryOperations.atomicRMW(ptr, uty, op, addr, ob, ib)
continueNormally()
}
}
@@ -1188,6 +1202,26 @@ class InterpreterThread(val id: Int, implicit private val microVM: MicroVM, init
box.asInstanceOf[BoxInt].value = OpHelper.unprepare(result, len)
}
private def incrementBoxIRefOrPointer(ptr: Boolean, src: SSAVariable, dst: SSAVariable, addrIncr: Word): Unit = {
if (ptr) {
val sb = boxOf(src).asInstanceOf[BoxPointer]
val db = boxOf(dst).asInstanceOf[BoxPointer]
db.addr = sb.addr + addrIncr
} else {
val sb = boxOf(src).asInstanceOf[BoxIRef]
val db = boxOf(dst).asInstanceOf[BoxIRef]
db.objRef = sb.objRef
db.offset = sb.offset + addrIncr
}
}
private def addressOf(ptr: Boolean, v: SSAVariable): Word = {
MemoryOperations.addressOf(ptr, boxOf(v))
}
def incrementBoxPointer(src: BoxPointer, dst: BoxPointer, addrIncr: Word): Unit = {
}
// Thread termination
/** Terminate the thread. Please only let the thread terminate itself. */
@@ -51,9 +51,9 @@ class MemorySupport(val muMemorySize: Word) {
def cmpXchgInt(addr: Word, expected: Int, desired: Int, inMu: Boolean = true): (Boolean, Int) = {
assertInMuMemory(inMu, addr)
val oldVal = loadInt(addr)
val oldVal = loadInt(addr, inMu)
if (oldVal == expected) {
storeInt(addr, desired)
storeInt(addr, desired, inMu)
return (true, oldVal)
} else {
return (false, oldVal)
@@ -62,9 +62,9 @@ class MemorySupport(val muMemorySize: Word) {
def cmpXchgLong(addr: Word, expected: Long, desired: Long, inMu: Boolean = true): (Boolean, Long) = {
assertInMuMemory(inMu, addr)
val oldVal = loadLong(addr)
val oldVal = loadLong(addr, inMu)
if (oldVal == expected) {
storeLong(addr, desired)
storeLong(addr, desired, inMu)
return (true, oldVal)
} else {
return (false, oldVal)
@@ -73,9 +73,9 @@ class MemorySupport(val muMemorySize: Word) {
def cmpXchgI128(addr: Word, expected: (Long, Long), desired: (Long, Long), inMu: Boolean = true): (Boolean, (Long, Long)) = {
assertInMuMemory(inMu, addr)
val oldVal = loadI128(addr)
val oldVal = loadI128(addr, inMu)
if (oldVal == expected) {
storeI128(addr, desired)
storeI128(addr, desired, inMu)
return (true, oldVal)
} else {
return (false, oldVal)
@@ -84,7 +84,7 @@ class MemorySupport(val muMemorySize: Word) {
def atomicRMWInt(optr: AtomicRMWOptr, addr: Word, opnd: Int, inMu: Boolean = true): Int = {
assertInMuMemory(inMu, addr)
val oldVal = loadInt(addr)
val oldVal = loadInt(addr, inMu)
val newVal = optr match {
case XCHG => opnd
case ADD => oldVal + opnd
@@ -98,13 +98,13 @@ class MemorySupport(val muMemorySize: Word) {
case UMAX => Math.max(oldVal - Int.MinValue, opnd - Int.MinValue) + Int.MinValue
case UMIN => Math.min(oldVal - Int.MinValue, opnd - Int.MinValue) + Int.MinValue
}
storeInt(addr, newVal)
storeInt(addr, newVal, inMu)
return oldVal
}
def atomicRMWLong(optr: AtomicRMWOptr, addr: Word, opnd: Long, inMu: Boolean = true): Long = {
assertInMuMemory(inMu, addr)
val oldVal = loadLong(addr)
val oldVal = loadLong(addr, inMu)
val newVal = optr match {
case XCHG => opnd
case ADD => oldVal + opnd
@@ -118,14 +118,14 @@ class MemorySupport(val muMemorySize: Word) {
case UMAX => Math.max(oldVal - Long.MinValue, opnd - Long.MinValue) + Long.MinValue
case UMIN => Math.min(oldVal - Long.MinValue, opnd - Long.MinValue) + Long.MinValue
}
storeLong(addr, newVal)
storeLong(addr, newVal, inMu)
return oldVal
}
def xchgI128(addr: Word, desired: (Long, Long), inMu: Boolean = true): (Long, Long) = {
assertInMuMemory(inMu, addr)
val oldVal = loadI128(addr)
storeI128(addr, desired)
val oldVal = loadI128(addr, inMu)
storeI128(addr, desired, inMu)
return oldVal
}
}
@@ -63,27 +63,29 @@ object TypeSizes {
def sizeOf(ty: Type): Word = ty match {
case TypeInt(l) => intBitsToBytes(l)
case _:TypeFloat => 4L
case _:TypeDouble => 8L
case _:TypeRef => WORD_SIZE_BYTES
case _:TypeIRef => 2L * WORD_SIZE_BYTES
case _:TypeWeakRef => WORD_SIZE_BYTES
case _: TypeFloat => 4L
case _: TypeDouble => 8L
case _: TypeRef => WORD_SIZE_BYTES
case _: TypeIRef => 2L * WORD_SIZE_BYTES
case _: TypeWeakRef => WORD_SIZE_BYTES
case t @ TypeStruct(ftys) => structPrefixSizeOf(t, ftys.size)
case t @ TypeArray(et,l) => seqPrefixSizeOf(t, l)
case _:TypeHybrid => throw new IllegalArgumentException("Hybrid should use hybridSizeOf to probe size")
case _:TypeVoid => 0L
case _:TypeFunc => WORD_SIZE_BYTES
case _:TypeThread => WORD_SIZE_BYTES
case _:TypeStack => WORD_SIZE_BYTES
case _:TypeTagRef64 => 8L
case t @ TypeVector(et,l) => seqPrefixSizeOf(t, l)
case t @ TypeArray(et, l) => seqPrefixSizeOf(t, l)
case _: TypeHybrid => throw new IllegalArgumentException("Hybrid should use hybridSizeOf to probe size")
case _: TypeVoid => 0L
case _: TypeFunc => WORD_SIZE_BYTES
case _: TypeThread => WORD_SIZE_BYTES
case _: TypeStack => WORD_SIZE_BYTES
case _: TypeTagRef64 => 8L
case t @ TypeVector(et, l) => seqPrefixSizeOf(t, l)
case _: TypePtr => WORD_SIZE_BYTES
case _: TypeFuncPtr => WORD_SIZE_BYTES
}
def alignOf(ty: Type): Word = ty match {
case TypeStruct(ftys) => ftys.map(sizeOf).max
case TypeArray(et,_) => alignOf(et)
case _:TypeHybrid => throw new IllegalArgumentException("Hybrid should use hybridAlignOf to probe alignment")
case _:TypeVoid => 1L
case TypeArray(et, _) => alignOf(et)
case _: TypeHybrid => throw new IllegalArgumentException("Hybrid should use hybridAlignOf to probe alignment")
case _: TypeVoid => 1L
case _ => sizeOf(ty)
}
@@ -87,5 +87,6 @@ abstract class UvmBundleTesterBase extends FlatSpec with Matchers {
def asTR64Box: BoxTagRef64 = vb.asInstanceOf[BoxTagRef64]
def asTR64Raw: Long = vb.asInstanceOf[BoxTagRef64].raw
def asVec: Seq[ValueBox] = vb.asInstanceOf[BoxVector].values
def asPointer: Word = vb.asInstanceOf[BoxPointer].addr
}
}
\ No newline at end of file
@@ -654,7 +654,7 @@ class UvmInterpreterSpec extends UvmBundleTesterBase {
testFunc(ca, func, Seq(a0, a1, a2, a3)) { (ca, th, st, wp) =>
val Seq(trunc, zext, sext, fptrunc, fpext, fptoui1, fptosi1, fptoui2, fptosi2, uitofp, sitofp,
bitcast1, bitcast2, bitcast3, bitcast4) = ca.dumpKeepalives(st, 0)
bitcast1, bitcast2, bitcast3, bitcast4, ptrcast1, ptrcast2, ptrcast3) = ca.dumpKeepalives(st, 0)
trunc.vb.asUInt(32) shouldBe 0x9abcdef0L
zext.vb.asUInt(64) shouldBe 0xfedcba98L
@@ -675,16 +675,24 @@ class UvmInterpreterSpec extends UvmBundleTesterBase {
bitcast3.vb.asFloat shouldBe 1.5f
bitcast4.vb.asDouble shouldBe 6.25d
ptrcast1.vb.asPointer shouldBe 0x123456789abcdef0L
ptrcast2.vb.asPointer shouldBe 0x123456789abcdef0L
ptrcast3.vb.asSInt(64) shouldBe 0x123456789abcdef0L
TrapRebindPassVoid(st)
}
val a5 = ca.putInt("@i64", -0x123456789abcdef0L)
testFunc(ca, func, Seq(a0, a5, a2, a3)) { (ca, th, st, wp) =>
val Seq(trunc, zext, sext, fptrunc, fpext, fptoui1, fptosi1, fptoui2, fptosi2, uitofp, sitofp,
bitcast1, bitcast2, bitcast3, bitcast4) = ca.dumpKeepalives(st, 0)
bitcast1, bitcast2, bitcast3, bitcast4, ptrcast1, ptrcast2, ptrcast3) = ca.dumpKeepalives(st, 0)
sitofp.vb.asDouble shouldBe (-0x123456789abcdef0L).doubleValue()
ptrcast1.vb.asPointer shouldBe -0x123456789abcdef0L
ptrcast2.vb.asPointer shouldBe -0x123456789abcdef0L
ptrcast3.vb.asSInt(64) shouldBe -0x123456789abcdef0L
TrapRebindPassVoid(st)
}
@@ -917,7 +925,7 @@ class UvmInterpreterSpec extends UvmBundleTesterBase {
ca.close()
}
"GETIREF, GETFIELDIREF, GITELEMIREF, SHIFTIREF, GETFIXEDPARTIREF AND GETVARPARTIREF" should "work" in {
"GETIREF, GETFIELDIREF, GITELEMIREF, SHIFTIREF, GETFIXEDPARTIREF AND GETVARPARTIREF" should "work with iref" in {
implicit def typeOf(name: String): Type = microVM.globalBundle.typeNs(name)
implicit def structTypeOf(name: String): TypeStruct = typeOf(name).asInstanceOf[TypeStruct]
implicit def seqTypeOf(name: String): AbstractSeqType = typeOf(name).asInstanceOf[AbstractSeqType]
@@ -946,7 +954,40 @@ class UvmInterpreterSpec extends UvmBundleTesterBase {
ca.close()
}
"LOAD and STORE" should "work in good cases" in {
"GETIREF, GETFIELDIREF, GITELEMIREF, SHIFTIREF, GETFIXEDPARTIREF AND GETVARPARTIREF" should "work with pointers" in {
implicit def typeOf(name: String): Type = microVM.globalBundle.typeNs(name)
implicit def structTypeOf(name: String): TypeStruct = typeOf(name).asInstanceOf[TypeStruct]
implicit def seqTypeOf(name: String): AbstractSeqType = typeOf(name).asInstanceOf[AbstractSeqType]
implicit def hybridTypeOf(name: String): TypeHybrid = typeOf(name).asInstanceOf[TypeHybrid]
val ca = microVM.newClientAgent()
val func = ca.putFunction("@memAddressingPtr")
testFunc(ca, func, Seq()) { (ca, th, st, wp) =>
val Seq(barPtr, bazPtr, jaPtr, bar3, baz3, baz6, jaFix, jaVar) = ca.dumpKeepalives(st, 0)
val base = 1024L
barPtr.vb.asPointer shouldEqual base
bazPtr.vb.asPointer shouldEqual base
jaPtr.vb.asPointer shouldEqual base
bar3.vb.asPointer shouldEqual (base + TypeSizes.fieldOffsetOf("@StructBar", 3))
baz3.vb.asPointer shouldEqual (base + TypeSizes.elemOffsetOf("@ArrayBaz", 3))
baz6.vb.asPointer shouldEqual (base + TypeSizes.elemOffsetOf("@ArrayBaz", 6))
jaFix.vb.asPointer shouldEqual (base)
jaVar.vb.asPointer shouldEqual (base + TypeSizes.varPartOffsetOf("@JavaLikeByteArray"))
TrapRebindPassVoid(st)
}
ca.close()
}
"LOAD and STORE" should "work with iref in good cases" in {
val ca = microVM.newClientAgent()
val func = ca.putFunction("@memAccessing")
@@ -971,7 +1012,41 @@ class UvmInterpreterSpec extends UvmBundleTesterBase {
ca.close()
}
"CMPXCHG and ATOMICRMW" should "work in good cases" in {
"LOAD and STORE" should "work with pointer in good cases" in {
val ca = microVM.newClientAgent()
val func = ca.putFunction("@memAccessingPtr")
val myms = new MemorySupport(1024)
val begin = myms.muMemoryBegin
val a0 = ca.putPointer("@ptri8", begin)
val a1 = ca.putPointer("@ptri16", begin + 8L)
val a2 = ca.putPointer("@ptri32", begin + 16L)
val a3 = ca.putPointer("@ptri64", begin + 32L)
val a4 = ca.putPointer("@ptrfloat", begin + 40L)
val a5 = ca.putPointer("@ptrdouble", begin + 48L)
val a6 = ca.putPointer("@ptrptrvoid", begin + 56L)
val a7 = ca.putPointer("@ptrfpi_i", begin + 64L)
testFunc(ca, func, Seq(a0, a1, a2, a3, a4, a5, a6, a7)) { (ca, th, st, wp) =>
val Seq(li8, li16, li32, li64, lf, ld, lp, lfp) = ca.dumpKeepalives(st, 0)
li8.vb.asSInt(8) shouldBe 41
li16.vb.asSInt(16) shouldBe 42
li32.vb.asSInt(32) shouldBe 43
li64.vb.asSInt(64) shouldBe 44
lf.vb.asFloat shouldBe 45.0f
ld.vb.asDouble shouldBe 46.0d
lp.vb.asPointer shouldBe 0x55aaL
lfp.vb.asPointer shouldBe 0x55aaL
TrapRebindPassVoid(st)
}
ca.close()
}
"CMPXCHG and ATOMICRMW" should "work with iref in good cases" in {
val ca = microVM.newClientAgent()
val func = ca.putFunction("@memAccessingAtomic")
@@ -1014,6 +1089,64 @@ class UvmInterpreterSpec extends UvmBundleTesterBase {
ca.close()
}
"CMPXCHG and ATOMICRMW" should "work with pointer in good cases" in {
val ca = microVM.newClientAgent()
val func = ca.putFunction("@memAccessingAtomicPtr")
val myms = new MemorySupport(1024)
val begin = myms.muMemoryBegin
val a0 = ca.putPointer("@ptri8", begin)
val a1 = ca.putPointer("@ptri16", begin + 8L)
val a2 = ca.putPointer("@ptri32", begin + 16L)
val a3 = ca.putPointer("@ptri64", begin + 32L)
val a4 = ca.putPointer("@ptrfloat", begin + 40L)
val a5 = ca.putPointer("@ptrdouble", begin + 48L)
val a6 = ca.putPointer("@ptrptrvoid", begin + 56L)
val a7 = ca.putPointer("@ptrfpi_i", begin + 64L)
testFunc(ca, func, Seq(a0, a1, a2, a3, a4, a5, a6, a7)) { (ca, th, st, wp) =>
val kas = ca.dumpKeepalives(st, 0)
// Scala limits unpacking of Seq to 22 elements
val Seq(cx32_1, cx32_2, cx64_1, cx64_2, l32, l64, cxp_1, cxp_2, cxfp_1, cxfp_2, lp, lfp,
rmw0, rmw1, rmw2, rmw3, rmw4, rmw5, rmw6, rmw7, rmw8, rmw9) = kas.take(22)
val Seq(rmwA, l64_2) = kas.drop(22)
cx32_1.vb.asSInt(32) shouldBe 43
cx32_2.vb.asSInt(32) shouldBe 53
cx64_1.vb.asSInt(64) shouldBe 44
cx64_2.vb.asSInt(64) shouldBe 54
l32.vb.asSInt(32) shouldBe 53
l64.vb.asSInt(64) shouldBe 54
cxp_1.vb.asPointer shouldBe 0x55abL
cxp_2.vb.asPointer shouldBe 0x5a5aL
cxfp_1.vb.asPointer shouldBe 0x55abL
cxfp_2.vb.asPointer shouldBe 0x5a5aL
lp.vb.asPointer shouldBe 0x5a5aL
lfp.vb.asPointer shouldBe 0x5a5aL
rmw0.vb.asSInt(64) shouldBe 1L
rmw1.vb.asSInt(64) shouldBe 0x55abL
rmw2.vb.asSInt(64) shouldBe 0x55aeL
rmw3.vb.asSInt(64) shouldBe 0x55aaL
rmw4.vb.asSInt(64) shouldBe 0x500aL
rmw5.vb.asSInt(64) shouldBe ~0x500aL
rmw6.vb.asSInt(64) shouldBe ~0x000aL
rmw7.vb.asSInt(64) shouldBe ~0x55a0L
rmw8.vb.asSInt(64) shouldBe -0x7fffffffffffffdeL
rmw9.vb.asSInt(64) shouldBe 42L
rmwA.vb.asSInt(64) shouldBe 11L
l64_2.vb.asSInt(64) shouldBe 0xffffffffffffffdeL
TrapRebindPassVoid(st)
}
ca.close()
}
"LOAD, STORE, CMPXCHG and ATOMICRMW" should "jump to the exceptional destination on NULL ref access" in {
val ca = microVM.newClientAgent()
val func = ca.putFunction("@memAccessingNull")