
Commit 64867984 authored by Kunshan Wang

Memory accessing instructions.

parent ff938bc6
@@ -574,14 +574,68 @@ class InterpreterThread(val id: Int, microVM: MicroVM, initialStack: Interpreter
}
case i @ InstLoad(ord, referentTy, loc, excClause) => {
val uty = InternalTypePool.unmarkedOf(referentTy)
val lb = boxOf(loc).asInstanceOf[BoxIRef]
val ib = boxOf(i)
val la = lb.objRef + lb.offset
if (la == 0L) {
nullRefError(excClause)
} else {
MemoryOperations.load(uty, la, ib, microVM)
continueNormally()
}
}
case i @ InstStore(ord, referentTy, loc, newVal, excClause) => {
val uty = InternalTypePool.unmarkedOf(referentTy)
val lb = boxOf(loc).asInstanceOf[BoxIRef]
val nvb = boxOf(newVal)
val ib = boxOf(i)
val la = lb.objRef + lb.offset
if (la == 0L) {
nullRefError(excClause)
} else {
MemoryOperations.store(uty, la, nvb, ib, microVM)
continueNormally()
}
}
case i @ InstCmpXchg(weak, ordSucc, ordFail, referentTy, loc, expected, desired, excClause) => {
val uty = InternalTypePool.unmarkedOf(referentTy)
val lb = boxOf(loc).asInstanceOf[BoxIRef]
val eb = boxOf(expected)
val db = boxOf(desired)
val ib = boxOf(i)
val la = lb.objRef + lb.offset
if (la == 0L) {
nullRefError(excClause)
} else {
MemoryOperations.cmpXchg(uty, la, eb, db, ib, microVM)
continueNormally()
}
}
case i @ InstAtomicRMW(ord, op, referentTy, loc, opnd, excClause) => {
val uty = InternalTypePool.unmarkedOf(referentTy)
val lb = boxOf(loc).asInstanceOf[BoxIRef]
val ob = boxOf(opnd)
val ib = boxOf(i)
val la = lb.objRef + lb.offset
if (la == 0L) {
nullRefError(excClause)
} else {
MemoryOperations.atomicRMW(uty, op, la, ob, ib, microVM)
continueNormally()
}
}
case i @ InstFence(ord) => {
// No-op in this interpreter
continueNormally()
}
// Indentation guide: Insert more instructions here.
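All four memory-access cases above share one shape: unbox the iref, form the raw address, trap on NULL via nullRefError, delegate to MemoryOperations, then continueNormally(). A minimal sketch of a helper that could factor this out (withNonNullAddress is a hypothetical name, not part of this commit):

// Hedged sketch: the common NULL-check-then-dispatch pattern of the cases above.
// withNonNullAddress is illustrative only; it does not exist in this codebase.
private def withNonNullAddress(lb: BoxIRef, excClause: Option[ExcClause])(op: Long => Unit): Unit = {
  val la = lb.objRef + lb.offset
  if (la == 0L) {
    nullRefError(excClause)
  } else {
    op(la)              // e.g. MemoryOperations.load(uty, la, boxOf(i), microVM)
    continueNormally()
  }
}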
@@ -791,18 +845,16 @@ class InterpreterThread(val id: Int, microVM: MicroVM, initialStack: Interpreter
f
} catch {
case e: UvmOutOfMemoryException => {
branchToExcDestOr(excClause) {
throw new UvmRuntimeException(ctx + "Out of memory and there is no handler.", e)
}
}
}
private def nullRefError(excClause: Option[ExcClause]): Unit = {
branchToExcDestOr(excClause) {
throw new UvmRuntimeException(ctx + "Accessing null reference.")
}
}
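Both handlers now delegate to branchToExcDestOr, which takes the exceptional branch when an EXC clause is present and otherwise runs the fallback block. A sketch of what such a helper plausibly looks like, inferred from the call sites and the code it replaces (not the verbatim definition):

// Hedged reconstruction from the call sites above: branch to the exceptional
// destination if one exists, else run the by-name fallback.
private def branchToExcDestOr(excClause: Option[ExcClause])(fallback: => Unit): Unit = {
  excClause match {
    case Some(ExcClause(_, excBB)) => branchAndMovePC(excBB, 0L)
    case None                      => fallback
  }
}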
@@ -87,8 +87,8 @@ object MemorySupport
case XOR => oldVal ^ opnd
case MAX => Math.max(oldVal, opnd)
case MIN => Math.min(oldVal, opnd)
case UMAX => Math.max(oldVal - Long.MinValue, opnd - Long.MinValue) + Long.MinValue
case UMIN => Math.min(oldVal - Long.MinValue, opnd - Long.MinValue) + Long.MinValue
}
storeLong(loc, newVal)
return oldVal
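The UMAX/UMIN fix works because subtracting Long.MinValue flips the sign bit, mapping unsigned order onto signed order over the full 64 bits; the previous Int.MinValue bias only did this correctly for 32-bit values. A quick self-contained check:

// Biased signed comparison agrees with the JDK's unsigned comparison.
val a = 11L
val b = 0xffffffffffffffdeL // -34 signed, but near 2^64 unsigned
assert(java.lang.Long.compareUnsigned(a, b) < 0)
assert(Math.max(a - Long.MinValue, b - Long.MinValue) + Long.MinValue == b) // UMAX picks b
assert(Math.min(a - Long.MinValue, b - Long.MinValue) + Long.MinValue == a) // UMIN picks a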
@@ -833,11 +833,10 @@ class UvmInterpreterSpec extends UvmBundleTesterBase
baz3.vb.asIRefAddr shouldEqual (bazIRef.vb.asIRefAddr + TypeSizes.elemOffsetOf("@ArrayBaz", 3))
baz6.vb.asIRefAddr shouldEqual (bazIRef.vb.asIRefAddr + TypeSizes.elemOffsetOf("@ArrayBaz", 6))
jaIRef.vb.asIRefAddr shouldEqual (jaRef.vb.asRef)
jaFix.vb.asIRefAddr shouldEqual (jaRef.vb.asRef)
jaVar.vb.asIRefAddr shouldEqual (jaRef.vb.asRef + TypeSizes.varPartOffsetOf("@JavaLikeByteArray"))
TrapRebindPassVoid(st)
}
@@ -845,4 +844,88 @@ class UvmInterpreterSpec extends UvmBundleTesterBase
ca.close()
}
"LOAD and STORE" should "work in good cases" in {
val ca = microVM.newClientAgent()
val func = ca.putFunction("@memAccessing")
testFunc(ca, func, Seq()) { (ca, th, st, wp) =>
val Seq(voidR, voidIR, li8, li16, li32, li64, lf, ld, lr, lir, lwr, lfunc) = ca.dumpKeepalives(st, 0)
li8.vb.asSInt(8) shouldBe 41
li16.vb.asSInt(16) shouldBe 42
li32.vb.asSInt(32) shouldBe 43
li64.vb.asSInt(64) shouldBe 44
lf.vb.asFloat shouldBe 45.0f
ld.vb.asDouble shouldBe 46.0d
lr.vb.asRef shouldBe voidR.vb.asRef
lir.vb.asIRef shouldBe voidIR.vb.asIRef
lwr.vb.asRef shouldBe voidR.vb.asRef
lfunc.vb.asFunc shouldBe Some(microVM.globalBundle.funcNs("@memAccessing"))
TrapRebindPassVoid(st)
}
ca.close()
}
"CMPXCHG and ATOMICRMW" should "work in good cases" in {
val ca = microVM.newClientAgent()
val func = ca.putFunction("@memAccessingAtomic")
testFunc(ca, func, Seq()) { (ca, th, st, wp) =>
val kas = ca.dumpKeepalives(st, 0)
// Scala limits unpacking of Seq to 22 elements
val Seq(voidR, voidR2, voidR3, cx32_1, cx32_2, cx64_1, cx64_2, l32, l64, cxr_1, cxr_2, lr,
rmw0, rmw1, rmw2, rmw3, rmw4, rmw5, rmw6, rmw7, rmw8, rmw9) = kas.take(22)
val Seq(rmwA, l64_2) = kas.drop(22)
cx32_1.vb.asSInt(32) shouldBe 43
cx32_2.vb.asSInt(32) shouldBe 53
cx64_1.vb.asSInt(64) shouldBe 44
cx64_2.vb.asSInt(64) shouldBe 54
l32.vb.asSInt(32) shouldBe 53
l64.vb.asSInt(64) shouldBe 54
cxr_1.vb.asRef shouldBe voidR.vb.asRef
cxr_2.vb.asRef shouldBe voidR2.vb.asRef
lr.vb.asRef shouldBe voidR2.vb.asRef
rmw0.vb.asSInt(64) shouldBe 1L
rmw1.vb.asSInt(64) shouldBe 0x55abL
rmw2.vb.asSInt(64) shouldBe 0x55aeL
rmw3.vb.asSInt(64) shouldBe 0x55aaL
rmw4.vb.asSInt(64) shouldBe 0x500aL
rmw5.vb.asSInt(64) shouldBe ~0x500aL
rmw6.vb.asSInt(64) shouldBe ~0x000aL
rmw7.vb.asSInt(64) shouldBe ~0x55a0L
rmw8.vb.asSInt(64) shouldBe -0x7fffffffffffffdeL
rmw9.vb.asSInt(64) shouldBe 42L
rmwA.vb.asSInt(64) shouldBe 11L
l64_2.vb.asSInt(64) shouldBe 0xffffffffffffffdeL
TrapRebindPassVoid(st)
}
ca.close()
}
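Every expected value in this test falls out of ATOMICRMW returning the old value of the cell at each step. The chain can be replayed on a plain AtomicLong (a sketch, assuming Scala 2.12+ SAM conversion for getAndUpdate; unsigned min/max via java.lang.Long.compareUnsigned):

import java.util.concurrent.atomic.AtomicLong

// Replays the @memAccessingAtomic RMW chain; each rmw(...) yields the OLD
// value, matching the rmw0..rmwA assertions above.
val g = new AtomicLong(1L)
def rmw(f: Long => Long): Long = g.getAndUpdate(v => f(v))
def umin(x: Long, y: Long): Long = if (java.lang.Long.compareUnsigned(x, y) <= 0) x else y
def umax(x: Long, y: Long): Long = if (java.lang.Long.compareUnsigned(x, y) >= 0) x else y

assert(rmw(_ => 0x55abL) == 1L)                            // XCHG: 1 -> 0x55ab
assert(rmw(_ + 3L) == 0x55abL)                             // ADD:  0x55ab -> 0x55ae
assert(rmw(_ - 4L) == 0x55aeL)                             // SUB:  0x55ae -> 0x55aa
assert(rmw(_ & 0x5a5aL) == 0x55aaL)                        // AND:  0x55aa -> 0x500a
assert(rmw(v => ~(v & 0x5a5aL)) == 0x500aL)                // NAND: 0x500a -> ~0x500a
assert(rmw(_ | 0x5000L) == ~0x500aL)                       // OR:   ~0x500a -> ~0x000a
assert(rmw(_ ^ 0x55aaL) == ~0x000aL)                       // XOR:  ~0x000a -> ~0x55a0
assert(rmw(Math.min(_, -0x7fffffffffffffdeL)) == ~0x55a0L) // MIN
assert(rmw(Math.max(_, 42L)) == -0x7fffffffffffffdeL)      // MAX
assert(rmw(umin(_, 11L)) == 42L)                           // UMIN
assert(rmw(umax(_, 0xffffffffffffffdeL)) == 11L)           // UMAX
assert(g.get() == 0xffffffffffffffdeL)                     // the final LOAD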
"LOAD, STORE, CMPXCHG and ATOMICRMW" should "jump to the exceptional destination on NULL ref access" in {
val ca = microVM.newClientAgent()
val func = ca.putFunction("@memAccessingNull")
testFunc(ca, func, Seq()) { (ca, th, st, wp) =>
nameOf(ca.currentInstruction(st, 0)) match {
case "@memAccessingNull_v1.trap_exit" => {}
case "@memAccessingNull_v1.trap_unreachable" => fail("Reached %trap_unreachable")
case n => fail("Unexpected trap " + n)
}
TrapRebindPassVoid(st)
}
ca.close()
}
}
@@ -544,7 +544,11 @@
.const @I8_41 <@i8> = 41
.const @I16_42 <@i16> = 42
.const @I32_43 <@i32> = 43
.const @I32_53 <@i32> = 53
.const @I32_63 <@i32> = 63
.const @I64_44 <@i64> = 44
.const @I64_54 <@i64> = 54
.const @I64_64 <@i64> = 64
.const @F_45 <@float> = 45.0f
.const @D_46 <@double> = 46.0d
@@ -581,57 +585,88 @@
%trap = TRAP <@void> KEEPALIVE (%void_r %void_ir %li8 %li16
%li32 %li64 %lf %ld %lr %lir %lwr %lfunc)
COMMINST @uvm.thread_exit
}
.const @I64_0x55ab <@i64> = 0x55ab
.const @I64_0x5a5a <@i64> = 0x5a5a
.const @I64_0x5000 <@i64> = 0x5000
.const @I64_0x55aa <@i64> = 0x55aa
.const @I64_N0x7fffffffffffffde <@i64> = -0x7fffffffffffffde
.const @I64_42 <@i64> = 42
.const @I64_11 <@i64> = 11
.const @I64_0xffffffffffffffde <@i64> = 0xffffffffffffffde
.funcdef @memAccessingAtomic VERSION @memAccessingAtomic_v1 <@noparamsnoret> () {
%entry:
STORE SEQ_CST <@i32> @g_i32 @I32_43
STORE SEQ_CST <@i64> @g_i64 @I64_44
%void_r = NEW <@void>
%void_r2 = NEW <@void>
%void_r3 = NEW <@void>
STORE <@refvoid> @g_r %void_r
%cx32_1 = CMPXCHG SEQ_CST RELAXED <@i32> @g_i32 @I32_43 @I32_53
%cx32_2 = CMPXCHG SEQ_CST RELAXED <@i32> @g_i32 @I32_43 @I32_63
%cx64_1 = CMPXCHG SEQ_CST RELAXED <@i64> @g_i64 @I64_44 @I64_54
%cx64_2 = CMPXCHG SEQ_CST RELAXED <@i64> @g_i64 @I64_44 @I64_64
%l32 = LOAD SEQ_CST <@i32> @g_i32
%l64 = LOAD SEQ_CST <@i64> @g_i64
%cxr_1 = CMPXCHG SEQ_CST RELAXED <@refvoid> @g_r %void_r %void_r2
%cxr_2 = CMPXCHG SEQ_CST RELAXED <@refvoid> @g_r %void_r %void_r3
%lr = LOAD <@refvoid> @g_r
STORE <@i64> @g_i64 @I64_1
%rmw0 = ATOMICRMW SEQ_CST XCHG <@i64> @g_i64 @I64_0x55ab // 1 -> 0x55ab
%rmw1 = ATOMICRMW SEQ_CST ADD <@i64> @g_i64 @I64_3 // 0x55ab -> 0x55ae
%rmw2 = ATOMICRMW SEQ_CST SUB <@i64> @g_i64 @I64_4 // 0x55ae -> 0x55aa
%rmw3 = ATOMICRMW SEQ_CST AND <@i64> @g_i64 @I64_0x5a5a // 0x55aa -> 0x500a
%rmw4 = ATOMICRMW SEQ_CST NAND <@i64> @g_i64 @I64_0x5a5a // 0x500a -> ~0x500a
%rmw5 = ATOMICRMW SEQ_CST OR <@i64> @g_i64 @I64_0x5000 // ~0x500a -> ~0x000a
%rmw6 = ATOMICRMW SEQ_CST XOR <@i64> @g_i64 @I64_0x55aa // ~0x000a -> ~0x55a0
%rmw7 = ATOMICRMW SEQ_CST MIN <@i64> @g_i64 @I64_N0x7fffffffffffffde // ~0x55a0 -> -0x7fffffffffffffde
%rmw8 = ATOMICRMW SEQ_CST MAX <@i64> @g_i64 @I64_42 // -0x7fffffffffffffde -> 42
%rmw9 = ATOMICRMW SEQ_CST UMIN <@i64> @g_i64 @I64_11 // 42 -> 11
%rmwA = ATOMICRMW SEQ_CST UMAX <@i64> @g_i64 @I64_0xffffffffffffffde // 11 -> 0xffffffffffffffde
%l64_2 = LOAD SEQ_CST <@i64> @g_i64
%trap = TRAP <@void> KEEPALIVE (%void_r %void_r2 %void_r3
%cx32_1 %cx32_2 %cx64_1 %cx64_2 %l32 %l64 %cxr_1 %cxr_2 %lr
%rmw0 %rmw1 %rmw2 %rmw3 %rmw4 %rmw5 %rmw6 %rmw7 %rmw8 %rmw9 %rmwA %l64_2)
COMMINST @uvm.thread_exit
}
.const @NULLIREF_I64 <@irefi64> = NULL
.funcdef @memAccessingNull VERSION @memAccessingNull_v1 <@noparamsnoret> () {
%entry:
%l = LOAD <@i64> @NULLIREF_I64 EXC(%unreachable %bb2)
%bb2:
%s = STORE <@i64> @NULLIREF_I64 @I64_0 EXC(%unreachable %bb3)
%bb3:
%c = CMPXCHG SEQ_CST RELAXED <@i64> @NULLIREF_I64 @I64_0 @I64_1 EXC(%unreachable %bb4)
%bb4:
%a = ATOMICRMW SEQ_CST XCHG <@i64> @NULLIREF_I64 @I64_0 EXC(%unreachable %exit)
%exit:
%trap_exit = TRAP <@void>
COMMINST @uvm.thread_exit
%unreachable:
%trap_unreachable = TRAP <@void>
COMMINST @uvm.thread_exit
}
// .funcsig @watchpointtest_sig = @noparamsnoret
// .funcdef @watchpointtest VERSION @watchpointtest_v1 <@watchpointtest_sig> () {