Commit 1c9f0697 authored by Kunshan Wang

More memory operation tests and object layout test.

parent f0aed849
package uvm.refimpl.mem

import org.scalatest._
import uvm._
import uvm.types._
import uvm.refimpl._
import uvm.refimpl.mem._
import TypeSizes._
class UvmMemLayoutSpec extends FlatSpec with Matchers with BeforeAndAfter {
  "The sizes of primitive types" should "be appropriate" in {
    sizeOf(TypeInt(8)) shouldBe 1
    sizeOf(TypeInt(16)) shouldBe 2
    sizeOf(TypeInt(32)) shouldBe 4
    sizeOf(TypeInt(64)) shouldBe 8
    sizeOf(TypeFloat()) shouldBe 4
    sizeOf(TypeDouble()) shouldBe 8
    sizeOf(TypeRef(TypeVoid())) shouldBe 8
    sizeOf(TypeIRef(TypeVoid())) shouldBe 16
    sizeOf(TypeWeakRef(TypeVoid())) shouldBe 8
    sizeOf(TypeVoid()) shouldBe 0
    sizeOf(TypeFunc(FuncSig(TypeVoid(), Seq()))) shouldBe 8
    sizeOf(TypeThread()) shouldBe 8
    sizeOf(TypeStack()) shouldBe 8
    sizeOf(TypeTagRef64()) shouldBe 8
  }
"The alignment of primitive types" should "be appropriate" in {
    alignOf(TypeInt(8)) shouldBe 1
    alignOf(TypeInt(16)) shouldBe 2
    alignOf(TypeInt(32)) shouldBe 4
    alignOf(TypeInt(64)) shouldBe 8
    alignOf(TypeFloat()) shouldBe 4
    alignOf(TypeDouble()) shouldBe 8
    alignOf(TypeRef(TypeVoid())) shouldBe 8
    alignOf(TypeIRef(TypeVoid())) shouldBe 16
    alignOf(TypeWeakRef(TypeVoid())) shouldBe 8
    alignOf(TypeVoid()) shouldBe 1
    alignOf(TypeFunc(FuncSig(TypeVoid(), Seq()))) shouldBe 8
    alignOf(TypeThread()) shouldBe 8
    alignOf(TypeStack()) shouldBe 8
    alignOf(TypeTagRef64()) shouldBe 8
  }
"Struct types" should "have the size of all members plus padding and the alignment of the most strict member" in {
val ty = TypeStruct(Seq(TypeInt(8), TypeInt(16), TypeInt(32), TypeInt(64)))
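    // Expected layout: i8 at 0, i16 at 2, i32 at 4, i64 at 8; 16 bytes total,
    // aligned to the strictest member (the i64).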
    sizeOf(ty) shouldBe 16
    alignOf(ty) shouldBe 8
  }
"Array types" should "have the size of all elements plus padding and the alignment of its element" in {
val ty = TypeArray(TypeInt(64), 100)
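    // 100 elements of 8 bytes each; no padding is needed because the element
    // size is already a multiple of its alignment.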
    sizeOf(ty) shouldBe 800
    alignOf(ty) shouldBe 8
  }
"Vector types" should "have the size of all elements and the alignment of its own size" in {
val ty = TypeVector(TypeInt(32), 4)
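    // 4 x i32 = 16 bytes; a vector aligns to its full size rather than to its element.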
    sizeOf(ty) shouldBe 16
    alignOf(ty) shouldBe 16
  }
"The offset of struct fields" should "go past all previous fields and align to the current field" in {
val ty = TypeStruct(Seq(TypeInt(8), TypeInt(16), TypeInt(32), TypeInt(64)))
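    // Each field starts at the end of the previous one, rounded up to its own
    // alignment: i8 at 0, i16 at 2 (one padding byte), i32 at 4, i64 at 8.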
    fieldOffsetOf(ty, 0) shouldBe 0
    fieldOffsetOf(ty, 1) shouldBe 2
    fieldOffsetOf(ty, 2) shouldBe 4
    fieldOffsetOf(ty, 3) shouldBe 8
  }
"The offset of array elements" should "be as if shifting by the element size and aligned at each element" in {
val ty = TypeArray(TypeInt(64), 100)
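    // Element i sits at i * 8, so element 50 is at byte 400.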
    elemOffsetOf(ty, 0L) shouldBe 0
    elemOffsetOf(ty, 50L) shouldBe 400
  }
"In a hybrid, fields" should "be laid out in the fixed-then-var fasion" in {
val ty = TypeHybrid(TypeInt(16), TypeDouble())
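    // Fixed part: an i16 (2 bytes) padded to the var part's 8-byte alignment;
    // var part: 10 doubles = 80 bytes. 8 + 80 = 88 bytes in total.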
    hybridSizeOf(ty, 10) shouldBe 88
    hybridAlignOf(ty, 10) shouldBe 8
    fixedPartOffsetOf(ty) shouldBe 0
    varPartOffsetOf(ty) shouldBe 8
  }
}
\ No newline at end of file
@@ -10,7 +10,7 @@ import uvm.refimpl.itpr._
import MemoryOrder._
import AtomicRMWOptr._
-class UvmMemSpec extends FlatSpec with Matchers with BeforeAndAfter {
+class UvmMemOperationsSpec extends FlatSpec with Matchers with BeforeAndAfter {
  // The heap size is intentionally reduced to make GC happen more often.
  // The heap is divided into two halves: a 256KiB small object space (with 8 32KiB blocks) and a 256KiB large object space.
@@ -264,6 +264,30 @@ class UvmMemSpec extends FlatSpec with Matchers with BeforeAndAfter {
      val v = ca.toDouble(hv)
      v shouldEqual vd(i)
    }
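    // Allocate a hybrid @h1 whose variable part has 10 elements, then exercise
    // iref arithmetic over its fixed and variable parts.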
    val hSz = ca.putInt("@i32", 10)
    val hh = ca.newHybrid("@h1", hSz)
    val hhi = ca.getIRef(hh)
    val hf = ca.getFixedPartIRef(hhi)
    val hv0 = ca.getVarPartIRef(hhi)
    val hfix0 = ca.getFieldIRef(hf, 0)
    val hfix1 = ca.getFieldIRef(hf, 1)
    val hOff = ca.putInt("@i16", 5)
    val hv5 = ca.shiftIRef(hv0, hOff)
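    // Store i into var-part element i, then load it back and compare.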
    for (i <- 0 until 10) {
      val hI = ca.putInt("@i64", i)
      val hHybElem = ca.shiftIRef(hv0, hI)
      ca.store(NOT_ATOMIC, hHybElem, hI)
      val hIOut = ca.load(NOT_ATOMIC, hHybElem)
      val hIOutVal = ca.toInt(hIOut, true)
      hIOutVal.intValue shouldEqual i
    }
    ca.close()
  }
@@ -414,7 +438,7 @@ class UvmMemSpec extends FlatSpec with Matchers with BeforeAndAfter {
testIntAtomicRMW("@i32", hf2, ADD, 1, 2, 3)
testIntAtomicRMW("@i32", hf2, SUB, 3, 2, 1)
testIntAtomicRMW("@i32", hf2, AND, 0x55aa, 0x5a5a, 0x500a)
testIntAtomicRMW("@i32", hf2, NAND, 0x55aa, 0x5a5a, ~0x500a, signed=true)
testIntAtomicRMW("@i32", hf2, NAND, 0x55aa, 0x5a5a, ~0x500a, signed = true)
testIntAtomicRMW("@i32", hf2, OR, 0x55aa, 0x5a5a, 0x5ffa)
testIntAtomicRMW("@i32", hf2, XOR, 0x55aa, 0x5a5a, 0x0ff0)
testIntAtomicRMW("@i32", hf2, MIN, -3, -2, -3, signed = true)
@@ -426,7 +450,7 @@ class UvmMemSpec extends FlatSpec with Matchers with BeforeAndAfter {
testIntAtomicRMW("@i64", hf3, ADD, 1, 2, 3)
testIntAtomicRMW("@i64", hf3, SUB, 3, 2, 1)
testIntAtomicRMW("@i64", hf3, AND, 0x55aa, 0x5a5a, 0x500a)
testIntAtomicRMW("@i64", hf3, NAND, 0x55aa, 0x5a5a, ~0x500a, signed=true)
testIntAtomicRMW("@i64", hf3, NAND, 0x55aa, 0x5a5a, ~0x500a, signed = true)
testIntAtomicRMW("@i64", hf3, OR, 0x55aa, 0x5a5a, 0x5ffa)
testIntAtomicRMW("@i64", hf3, XOR, 0x55aa, 0x5a5a, 0x0ff0)
testIntAtomicRMW("@i64", hf3, MIN, -3, -2, -3, signed = true)