Commit 210ee90e authored by Kunshan Wang

Calling native functions.

parent 25c803ec
......@@ -13,6 +13,8 @@ libraryDependencies := Seq(
"com.typesafe.scala-logging" %% "scala-logging" % "3.1.0",
"ch.qos.logback" % "logback-classic" % "1.1.2",
"com.github.jnr" % "jnr-ffi" % "2.0.3",
"com.github.jnr" % "jffi" % "1.2.9",
"com.github.jnr" % "jnr-posix" % "3.0.17",
"org.scalatest" %% "scalatest" % "2.2.0"
)
......
......@@ -7,6 +7,7 @@ import uvm.refimpl.mem.TypeSizes.Word
import scala.collection.mutable.HashSet
import uvm.ir.textinput.UIRTextReader
import uvm.ir.textinput.IDFactory
import uvm.refimpl.nat.NativeHelper
object MicroVM {
val DEFAULT_HEAP_SIZE: Word = 4L * 1024L * 1024L; // 4MiB
......@@ -26,6 +27,8 @@ class MicroVM(heapSize: Word = MicroVM.DEFAULT_HEAP_SIZE,
val memoryManager = new MemoryManager(heapSize, globalSize, stackSize)
private implicit val memorySupport = memoryManager.memorySupport
val nativeHelper = new NativeHelper()
val threadStackManager = new ThreadStackManager()
val trapManager = new TrapManager()
......
......@@ -497,7 +497,7 @@ class ClientAgent(mutator: Mutator)(
}
val box = newType match {
case TypeInt(n) => new BoxInt(OpHelper.trunc(BigInt(addr), n))
case TypeInt(n) => new BoxInt(OpHelper.trunc(BigInt(addr), Math.min(n, 64)))
case _: AbstractPointerType => new BoxPointer(addr)
}
......@@ -505,13 +505,13 @@ class ClientAgent(mutator: Mutator)(
}
def pin(handle: Handle): Handle = {
val (objTy, objRef) = handle.ty match {
case TypeRef(t) => (t, handle.vb.asInstanceOf[BoxRef].objRef)
case TypeIRef(t) => (t, handle.vb.asInstanceOf[BoxIRef].objRef)
val (objTy, (objRef, offset)) = handle.ty match {
case TypeRef(t) => (t, (handle.vb.asInstanceOf[BoxRef].objRef, 0L))
case TypeIRef(t) => (t, handle.vb.asInstanceOf[BoxIRef].oo)
}
pin(objRef)
val ptrTy = InternalTypePool.ptrOf(objTy)
val box = new BoxPointer(objRef)
val box = new BoxPointer(objRef + offset)
newHandle(ptrTy, box)
}
......
......@@ -3,6 +3,7 @@ package uvm.refimpl
import uvm._
import uvm.types._
import uvm.ssavariables._
import uvm.utils.LazyPool
import uvm.ir.textinput.IDFactory
import scala.collection.mutable.HashMap
import uvm.FuncSig
......@@ -40,20 +41,12 @@ object InternalTypes {
}
object InternalTypePool {
class LazyPool[FromT, ToT](factory: FromT => ToT) {
val pool = HashMap[FromT, ToT]()
def apply(obj: FromT): ToT = pool.get(obj).getOrElse(factory(obj))
}
object LazyPool {
def apply[FromT, ToT](factory: FromT => ToT): LazyPool[FromT, ToT] = new LazyPool[FromT, ToT](factory)
}
val refOf = LazyPool(TypeRef)
val irefOf = LazyPool(TypeIRef)
val ptrOf = LazyPool(TypePtr)
val funcOf = LazyPool(TypeFunc)
val funcPtrOf = LazyPool(TypeFuncPtr)
val vecOf = new LazyPool[(Type, Long), TypeVector]({ case (t, l) => TypeVector(t, l) })
val vecOf = LazyPool[(Type, Long), TypeVector] { case (t, l) => TypeVector(t, l) }
def unmarkedOf(t: Type): Type = t match {
case TypeWeakRef(r) => refOf(r)
case _ => t
......
......@@ -373,20 +373,12 @@ class InterpreterThread(val id: Int, initialStack: InterpreterStack, val mutator
case _ =>
}
val srcAddr: Word = scalarFromTy match {
case TypeInt(n) => {
val od = bOpnd.asInstanceOf[BoxInt].value
val truncExt = if (n >= 64) OpHelper.trunc(od, 64) else OpHelper.zext(od, n, 64)
truncExt.toLong
}
case TypePtr(_) | TypeFuncPtr(_) => bOpnd.asInstanceOf[BoxPointer].addr
case TypeInt(n) => bOpnd.asInstanceOf[BoxInt].value.longValue // truncates
case _: AbstractPointerType => bOpnd.asInstanceOf[BoxPointer].addr
}
scalarToTy match {
case TypeInt(n) => {
val bi = BigInt(srcAddr)
val truncExt = if (n > 64) OpHelper.zext(bi, 64, n) else OpHelper.trunc(bi, n)
br.asInstanceOf[BoxInt].value = truncExt
}
case TypePtr(_) | TypeFuncPtr(_) => br.asInstanceOf[BoxPointer].addr = srcAddr
case TypeInt(n) => br.asInstanceOf[BoxInt].value = OpHelper.trunc(BigInt(srcAddr), Math.min(n, 64))
case _: AbstractPointerType => br.asInstanceOf[BoxPointer].addr = srcAddr
}
}
......@@ -779,7 +771,18 @@ class InterpreterThread(val id: Int, initialStack: InterpreterStack, val mutator
}
case i @ InstCCall(callConv, funcTy, sig, callee, argList, keepAlives) => {
throw new UvmRefImplException(ctx + "The CCALL instruction is not implemented in this reference implementation")
if (callConv != Flag("#DEFAULT")) {
throw new UvmRefImplException(ctx + "Currently only support the #DEFAULT callConv. %s found.".format(callConv.name))
}
val addr = boxOf(callee).asInstanceOf[BoxPointer].addr
val argBoxes = argList.map(boxOf)
val retBox = boxOf(i)
microVM.nativeHelper.callNative(sig, addr, argBoxes, retBox)
continueNormally()
}
case i @ InstNewStack(sig, callee, argList, excClause) => {
......@@ -1064,29 +1067,29 @@ class InterpreterThread(val id: Int, initialStack: InterpreterStack, val mutator
case "@uvm.native.pin" => {
val Seq(ty) = typeList
val Seq(r) = argList
val addr = ty match {
case TypeRef(_) => boxOf(r).asInstanceOf[BoxRef].objRef
case TypeIRef(_) => boxOf(r).asInstanceOf[BoxIRef].objRef
val (addr, offset) = ty match {
case TypeRef(_) => (boxOf(r).asInstanceOf[BoxRef].objRef, 0L)
case TypeIRef(_) => boxOf(r).asInstanceOf[BoxIRef].oo
}
pin(addr)
boxOf(i).asInstanceOf[BoxPointer].addr = addr
boxOf(i).asInstanceOf[BoxPointer].addr = addr + offset
continueNormally()
}
case "@uvm.native.unpin" => {
val Seq(ty) = typeList
val Seq(r) = argList
val addr = ty match {
case TypeRef(_) => boxOf(r).asInstanceOf[BoxRef].objRef
case TypeIRef(_) => boxOf(r).asInstanceOf[BoxIRef].objRef
}
unpin(addr)
continueNormally()
}
// Insert more CommInsts here.
......
package uvm.refimpl.nat
import com.kenai.jffi.{ Type => JType, Struct => JStruct, Function => JFunction, HeapInvocationBuffer, Invoker }
import uvm.FuncSig
import uvm.refimpl.UvmRefImplException
import uvm.refimpl.itpr.ValueBox
import uvm.refimpl.mem.TypeSizes.Word
import uvm.types._
import uvm.types.{ Type => MType }
import uvm.utils.LazyPool
import uvm.refimpl.itpr._
import java.nio.ByteBuffer
import java.nio.ByteOrder
import uvm.refimpl.mem.TypeSizes
/**
* Helps calling native functions. Based on JFFI.
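*
* A minimal usage sketch (assumptions: `getpidSig` is a FuncSig for a native
* function taking no parameters and returning a 32-bit int, and `retBox` is a
* BoxInt prepared by the caller, as the CCALL case in InterpreterThread does):
* {{{
* val helper = new NativeHelper()
* val addr = com.kenai.jffi.Library.getDefault().getSymbolAddress("getpid")
* helper.callNative(getpidSig, addr, Seq(), retBox) // retBox now holds the pid
* }}}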
*/
class NativeHelper {
val jffiTypePool: LazyPool[MType, JType] = LazyPool {
case TypeVoid() => JType.VOID
case TypeInt(8) => JType.SINT8
case TypeInt(16) => JType.SINT16
case TypeInt(32) => JType.SINT32
case TypeInt(64) => JType.SINT64
case TypeFloat() => JType.FLOAT
case TypeDouble() => JType.DOUBLE
case TypeVector(_, _) => throw new UvmRefImplException("Vectors are not implemented in native calls.")
case TypeStruct(fields) => {
val fieldsNativeTypes: Seq[JType] = fields.map(jffiTypePool.apply)
val strType = JStruct.newStruct(fieldsNativeTypes: _*)
strType
}
case _: AbstractPointerType => JType.POINTER
case t => throw new UvmRefImplException("Type %s cannot be used in native calls.".format(t.repr))
}
val jffiFuncPool = LazyPool[(FuncSig, Word), JFunction] {
case (sig, funcAddr) => {
val jParamTypes = sig.paramTy.map(jffiTypePool.apply)
val jRetTy = jffiTypePool(sig.retTy)
new JFunction(funcAddr, jRetTy, jParamTypes: _*)
}
}
private def putArgToBuf(buf: ByteBuffer, off: Int, mty: MType, vb: ValueBox): Unit = {
mty match {
case TypeInt(8) => buf.put(off, vb.asInstanceOf[BoxInt].value.toByte)
case TypeInt(16) => buf.putShort(off, vb.asInstanceOf[BoxInt].value.toShort)
case TypeInt(32) => buf.putInt(off, vb.asInstanceOf[BoxInt].value.toInt)
case TypeInt(64) => buf.putLong(off, vb.asInstanceOf[BoxInt].value.toLong)
case TypeFloat() => buf.putFloat(off, vb.asInstanceOf[BoxFloat].value)
case TypeDouble() => buf.putDouble(off, vb.asInstanceOf[BoxDouble].value)
case s @ TypeStruct(flds) => {
val fldvbs = vb.asInstanceOf[BoxStruct].values
for (((fty, fvb), i) <- (flds zip fldvbs).zipWithIndex) {
val off2 = TypeSizes.fieldOffsetOf(s, i)
putArgToBuf(buf, off + off2.toInt, fty, fvb) // recurse on the field's own type and value box
}
}
case _: AbstractPointerType => buf.putLong(off, vb.asInstanceOf[BoxPointer].addr)
}
}
private def putArg(hib: HeapInvocationBuffer, mty: MType, vb: ValueBox): Unit = {
mty match {
case TypeInt(8) => hib.putByte(vb.asInstanceOf[BoxInt].value.toByte)
case TypeInt(16) => hib.putShort(vb.asInstanceOf[BoxInt].value.toShort)
case TypeInt(32) => hib.putInt(vb.asInstanceOf[BoxInt].value.toInt)
case TypeInt(64) => hib.putLong(vb.asInstanceOf[BoxInt].value.toLong)
case TypeFloat() => hib.putFloat(vb.asInstanceOf[BoxFloat].value)
case TypeDouble() => hib.putDouble(vb.asInstanceOf[BoxDouble].value)
case TypeStruct(flds) => {
val buf = ByteBuffer.allocate(TypeSizes.sizeOf(mty).toInt).order(ByteOrder.nativeOrder()) // native structs use the platform byte order
putArgToBuf(buf, 0, mty, vb)
hib.putStruct(buf.array(), buf.arrayOffset())
}
case _: AbstractPointerType => hib.putAddress(vb.asInstanceOf[BoxPointer].addr)
}
}
def callNative(sig: FuncSig, func: Word, args: Seq[ValueBox], retBox: ValueBox): Unit = {
val jFunc = jffiFuncPool((sig, func))
val hib = new HeapInvocationBuffer(jFunc)
for ((mty, vb) <- (sig.paramTy zip args)) {
putArg(hib, mty, vb)
}
val inv = Invoker.getInstance
sig.retTy match {
case TypeInt(8) => {
val rv = inv.invokeInt(jFunc, hib).toByte
retBox.asInstanceOf[BoxInt].value = OpHelper.trunc(BigInt(rv), 8)
}
case TypeInt(16) => {
val rv = inv.invokeInt(jFunc, hib).toShort
retBox.asInstanceOf[BoxInt].value = OpHelper.trunc(BigInt(rv), 16)
}
case TypeInt(32) => {
val rv = inv.invokeInt(jFunc, hib)
retBox.asInstanceOf[BoxInt].value = OpHelper.trunc(BigInt(rv), 32)
}
case TypeInt(64) => {
val rv = inv.invokeLong(jFunc, hib)
retBox.asInstanceOf[BoxInt].value = OpHelper.trunc(BigInt(rv), 64)
}
case TypeFloat() => {
val rv = inv.invokeFloat(jFunc, hib)
retBox.asInstanceOf[BoxFloat].value = rv
}
case TypeDouble() => {
val rv = inv.invokeDouble(jFunc, hib)
retBox.asInstanceOf[BoxDouble].value = rv
}
case TypeStruct(flds) => {
val rv = inv.invokeStruct(jFunc, hib)
val buf = ByteBuffer.wrap(rv).order(ByteOrder.nativeOrder())
getArgFromBuf(buf, 0, sig.retTy, retBox)
}
case _: AbstractPointerType => {
val rv = inv.invokeAddress(jFunc, hib)
retBox.asInstanceOf[BoxPointer].addr = rv
}
}
}
private def getArgFromBuf(buf: ByteBuffer, off: Int, mty: MType, vb: ValueBox): Unit = {
mty match {
case TypeInt(8) => vb.asInstanceOf[BoxInt].value = OpHelper.trunc(buf.get(off), 8)
case TypeInt(16) => vb.asInstanceOf[BoxInt].value = OpHelper.trunc(buf.getShort(off), 16)
case TypeInt(32) => vb.asInstanceOf[BoxInt].value = OpHelper.trunc(buf.getInt(off), 32)
case TypeInt(64) => vb.asInstanceOf[BoxInt].value = OpHelper.trunc(buf.getLong(off), 64)
case TypeFloat() => vb.asInstanceOf[BoxFloat].value = buf.getFloat(off)
case TypeDouble() => vb.asInstanceOf[BoxDouble].value = buf.getDouble(off)
case s @ TypeStruct(flds) => {
val fldvbs = vb.asInstanceOf[BoxStruct].values
for (((fty, fvb), i) <- (flds zip fldvbs).zipWithIndex) {
val off2 = TypeSizes.fieldOffsetOf(s, i)
getArgFromBuf(buf, off + off2.toInt, fty, fvb) // recurse on the field's own type and value box
}
}
case _: AbstractPointerType => vb.asInstanceOf[BoxPointer].addr = buf.getLong(off)
}
}
}
\ No newline at end of file
package uvm.utils
import scala.collection.mutable.HashMap
class LazyPool[FromT, ToT](factory: FromT => ToT) {
val pool = HashMap[FromT, ToT]()
def apply(obj: FromT): ToT = pool.getOrElseUpdate(obj, factory(obj)) // cache the factory result so later lookups reuse it
}
object LazyPool {
def apply[FromT, ToT](factory: FromT => ToT): LazyPool[FromT, ToT] = new LazyPool[FromT, ToT](factory)
}
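// A minimal usage sketch of LazyPool (hypothetical key/value types), mapping
// each key through the factory:
//
//   val squares = LazyPool[Int, Long] { n => n.toLong * n }
//   squares(4) // 16L, produced by the factory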
package uvm.refimpl.itpr
import org.scalatest._
import java.io.FileReader
import uvm._
import uvm.types._
import uvm.ssavariables._
import uvm.refimpl._
import uvm.refimpl.itpr._
import MemoryOrder._
import AtomicRMWOptr._
import uvm.refimpl.mem.TypeSizes.Word
import ch.qos.logback.classic.Level._
import uvm.refimpl.UvmBundleTesterBase
import com.kenai.jffi.Library
import jnr.posix.POSIXFactory
class UvmInterpreterNativeTests extends UvmBundleTesterBase {
setLogLevels(
ROOT_LOGGER_NAME -> INFO,
"uvm.refimpl.itpr" -> DEBUG)
preloadBundles("tests/uvm-refimpl-test/native-tests.uir")
"The CCALL instruction" should "call the getpid() function" in {
val ca = microVM.newClientAgent()
val lib = Library.getDefault()
val funcAddr = lib.getSymbolAddress("getpid")
val posix = POSIXFactory.getPOSIX
val actualPID = posix.getpid
println("actualPID = %d".format(actualPID))
val func = ca.putFunction("@getpidtest")
val a0 = ca.putInt("@i64", funcAddr)
testFunc(ca, func, Seq(a0)) { (ca, th, st, wp) =>
val Seq(fp, rv) = ca.dumpKeepalives(st, 0)
fp.vb.asPointer shouldEqual funcAddr
rv.vb.asSInt(32) shouldEqual actualPID
TrapRebindPassVoid(st)
}
ca.close()
}
"The CCALL instruction" should "call the write() function" in {
val ca = microVM.newClientAgent()
val lib = Library.getDefault()
val funcAddr = lib.getSymbolAddress("write")
val func = ca.putFunction("@writetest")
val a0 = ca.putInt("@i64", funcAddr)
testFunc(ca, func, Seq(a0)) { (ca, th, st, wp) =>
val Seq(fp, rv, buf, bufV0P) = ca.dumpKeepalives(st, 0)
fp.vb.asPointer shouldEqual funcAddr
rv.vb.asSInt(64) shouldEqual 6
TrapRebindPassVoid(st)
}
ca.close()
}
"The CCALL instruction" should "call the memcpy() function" in {
val ca = microVM.newClientAgent()
val lib = Library.getDefault()
val funcAddr = lib.getSymbolAddress("memcpy")
val func = ca.putFunction("@memcpytest")
val a0 = ca.putInt("@i64", funcAddr)
testFunc(ca, func, Seq(a0)) { (ca, th, st, wp) =>
val Seq(fp, rv, ob, b0, b1, b2, b3, b4, b5) = ca.dumpKeepalives(st, 0)
fp.vb.asPointer shouldEqual funcAddr
rv.vb.asPointer shouldEqual ob.vb.asPointer
b0.vb.asSInt(8) shouldEqual 'H'
b1.vb.asSInt(8) shouldEqual 'e'
b2.vb.asSInt(8) shouldEqual 'l'
b3.vb.asSInt(8) shouldEqual 'l'
b4.vb.asSInt(8) shouldEqual 'o'
b5.vb.asSInt(8) shouldEqual '\n'
TrapRebindPassVoid(st)
}
ca.close()
}
}
\ No newline at end of file
.typedef @i1 = int<1>
.typedef @i6 = int<6>
.typedef @i8 = int<8>
.typedef @i16 = int<16>
.typedef @i32 = int<32>
.typedef @i52 = int<52>
.typedef @i64 = int<64>
.typedef @float = float
.typedef @double = double
.typedef @void = void
.funcsig @noparamsnoret = @void ()
.typedef @funcdumb = func<@noparamsnoret>
.typedef @thread = thread
.typedef @stack = stack
.typedef @tagref64 = tagref64
.const @TRUE <@i64> = 1
.const @FALSE <@i64> = 0
.const @I32_0 <@i32> = 0
.const @I32_1 <@i32> = 1
.const @I32_2 <@i32> = 2
.const @I32_3 <@i32> = 3
.const @I32_4 <@i32> = 4
.const @I32_5 <@i32> = 5
.const @I32_6 <@i32> = 6
.const @I32_7 <@i32> = 7
.const @I64_0 <@i64> = 0
.const @I64_1 <@i64> = 1
.const @I64_2 <@i64> = 2
.const @I64_3 <@i64> = 3
.const @I64_4 <@i64> = 4
.const @I64_5 <@i64> = 5
.const @I64_6 <@i64> = 6
.const @I64_7 <@i64> = 7
.const @F_0 <@float> = 0.0f
.const @F_1 <@float> = 1.0f
.const @F_2 <@float> = 2.0f
.const @F_3 <@float> = 3.0f
.const @F_4 <@float> = 4.0f
.const @F_5 <@float> = 5.0f
.const @F_6 <@float> = 6.0f
.const @F_7 <@float> = 7.0f
.const @D_0 <@double> = 0.0d
.const @D_1 <@double> = 1.0d
.const @D_2 <@double> = 2.0d
.const @D_3 <@double> = 3.0d
.const @D_4 <@double> = 4.0d
.const @D_5 <@double> = 5.0d
.const @D_6 <@double> = 6.0d
.const @D_7 <@double> = 7.0d
.typedef @4xfloat = vector <@float 4>
.typedef @4xi32 = vector <@i32 4>
.typedef @2xdouble = vector <@double 2>
.const @4xI32_V1 <@4xi32> = VEC {@I32_0 @I32_1 @I32_2 @I32_3}
.const @4xI32_V2 <@4xi32> = VEC {@I32_4 @I32_5 @I32_6 @I32_7}
.const @4xF_V1 <@4xfloat> = VEC {@F_0 @F_1 @F_2 @F_3}
.const @4xF_V2 <@4xfloat> = VEC {@F_4 @F_5 @F_6 @F_7}
.const @2xD_V1 <@2xdouble> = VEC {@D_0 @D_1}
.const @2xD_V2 <@2xdouble> = VEC {@D_2 @D_3}
.funcsig @i_i = @i64 (@i64)
.funcsig @i_ii = @i64 (@i64 @i64)
.typedef @refvoid = ref<@void>
.typedef @irefvoid = iref<@void>
.typedef @weakrefvoid = weakref<@void>
.const @NULLREF <@refvoid> = NULL
.const @NULLIREF <@irefvoid> = NULL
.const @NULLFUNC <@funcdumb> = NULL
.const @NULLSTACK <@stack> = NULL
.typedef @refi8 = ref<@i8>
.typedef @irefi8 = iref<@i8>
.typedef @refi64 = ref<@i64>
.typedef @irefi64 = iref<@i64>
.const @NULLREF_I64 <@refi64> = NULL
.const @NULLIREF_I64 <@irefi64> = NULL
.typedef @ptrvoid = ptr<@void>
.typedef @ptri8 = ptr<@i8>
.typedef @ptri16 = ptr<@i16>
.typedef @ptri32 = ptr<@i32>
.typedef @ptri64 = ptr<@i64>
.typedef @ptrfloat = ptr<@float>
.typedef @ptrdouble = ptr<@double>
.typedef @ptrptrvoid = ptr<@ptrvoid>
.typedef @ptrfpi_i = ptr<@fpi_i>
.typedef @fpnoparamsnoret = funcptr<@noparamsnoret>
.typedef @fpi_i = funcptr<@i_i>
.typedef @fpi_ii = funcptr<@i_ii>
.funcsig @v_a = @void (@i64)
.funcsig @getpid_sig = @i32 ()
.typedef @getpid_fp = funcptr<@getpid_sig>
.funcdef @getpidtest VERSION @getpidtest_v1 <@v_a> (%p0) {
%entry:
%fp = PTRCAST <@i64 @getpid_fp> %p0
%rv = CCALL #DEFAULT <@getpid_fp @getpid_sig> %fp ()
%trap = TRAP <@void> KEEPALIVE (%fp %rv)
COMMINST @uvm.thread_exit
}
.typedef @size_t = int<64>
.funcsig @write_sig = @size_t (@i32 @ptrvoid @size_t)
.typedef @write_fp = funcptr<@write_sig>
.typedef @CharBuf = hybrid<@i64 @i8>
.const @I8_H <@i8> = 0x48
.const @I8_e <@i8> = 0x65
.const @I8_l <@i8> = 0x6c
.const @I8_o <@i8> = 0x6f
.const @I8_NL <@i8> = 0x0a
.funcdef @writetest VERSION @writetest_v1 <@v_a> (%p0) {
%entry:
%fp = PTRCAST <@i64 @write_fp> %p0
%buf = NEWHYBRID <@CharBuf @i64> @I64_6
%buf_i = GETIREF <@CharBuf> %buf
%buf_v0 = GETVARPARTIREF <@CharBuf> %buf_i
STORE <@i8> %buf_v0 @I8_H
%buf_v1 = SHIFTIREF <@i8 @i64> %buf_v0 @I64_1
STORE <@i8> %buf_v1 @I8_e
%buf_v2 = SHIFTIREF <@i8 @i64> %buf_v1 @I64_1
STORE <@i8> %buf_v2 @I8_l
%buf_v3 = SHIFTIREF <@i8 @i64> %buf_v2 @I64_1
STORE <@i8> %buf_v3 @I8_l
%buf_v4 = SHIFTIREF <@i8 @i64> %buf_v3 @I64_1
STORE <@i8> %buf_v4 @I8_o
%buf_v5 = SHIFTIREF <@i8 @i64> %buf_v4 @I64_1
STORE <@i8> %buf_v5 @I8_NL
%buf_v0_p = COMMINST @uvm.native.pin <@irefi8> (%buf_v0)
%buf_v0_pv = PTRCAST <@ptri8 @ptrvoid> %buf_v0_p
%rv = CCALL #DEFAULT <@write_fp @write_sig> %fp (@I32_1 %buf_v0_pv @I64_6)
COMMINST @uvm.native.unpin <@irefi8> (%buf_v0)
%trap = TRAP <@void> KEEPALIVE (%fp %rv %buf %buf_v0_p)
COMMINST @uvm.thread_exit
}
.funcsig @memcpy_sig = @ptrvoid (@ptrvoid @ptrvoid @size_t)
.typedef @memcpy_fp = funcptr<@memcpy_sig>
.funcdef @memcpytest VERSION @memcpytest_v1 <@v_a> (%p0) {
%entry:
%fp = PTRCAST <@i64 @memcpy_fp> %p0
%buf = NEWHYBRID <@CharBuf @i64> @I64_6
%buf_i = GETIREF <@CharBuf> %buf
%buf_v0 = GETVARPARTIREF <@CharBuf> %buf_i
STORE <@i8> %buf_v0 @I8_H
%buf_v1 = SHIFTIREF <@i8 @i64> %buf_v0 @I64_1
STORE <@i8> %buf_v1 @I8_e
%buf_v2 = SHIFTIREF <@i8 @i64> %buf_v1 @I64_1
STORE <@i8> %buf_v2 @I8_l
%buf_v3 = SHIFTIREF <@i8 @i64> %buf_v2 @I64_1
STORE <@i8> %buf_v3 @I8_l
%buf_v4 = SHIFTIREF <@i8 @i64> %buf_v3 @I64_1
STORE <@i8> %buf_v4 @I8_o
%buf_v5 = SHIFTIREF <@i8 @i64> %buf_v4 @I64_1
STORE <@i8> %buf_v5 @I8_NL
%buf2 = NEWHYBRID <@CharBuf @i64> @I64_6
%buf2_i = GETIREF <@CharBuf> %buf2
%buf2_v0 = GETVARPARTIREF <@CharBuf> %buf2_i
%buf_v0_p = COMMINST @uvm.native.pin <@irefi8> (%buf_v0)
%buf_v0_pv = PTRCAST <@ptri8 @ptrvoid> %buf_v0_p
%buf2_v0_p = COMMINST @uvm.native.pin <@irefi8> (%buf2_v0)
%buf2_v0_pv = PTRCAST <@ptri8 @ptrvoid> %buf2_v0_p
%rv = CCALL #DEFAULT <@memcpy_fp @memcpy_sig> %fp (%buf2_v0_pv %buf_v0_pv @I64_6)
COMMINST @uvm.native.unpin <@irefi8> (%buf2_v0)
COMMINST @uvm.native.unpin <@irefi8> (%buf_v0)
%buf2_0 = LOAD <@i8> %buf2_v0
%buf2_v1 = SHIFTIREF <@i8 @i64> %buf2_v0 @I64_1
%buf2_1 = LOAD <@i8> %buf2_v1
%buf2_v2 = SHIFTIREF <@i8 @i64> %buf2_v1 @I64_1
%buf2_2 = LOAD <@i8> %buf2_v2
%buf2_v3 = SHIFTIREF <@i8 @i64> %buf2_v2 @I64_1
%buf2_3 = LOAD <@i8> %buf2_v3
%buf2_v4 = SHIFTIREF <@i8 @i64> %buf2_v3 @I64_1
%buf2_4 = LOAD <@i8> %buf2_v4
%buf2_v5 = SHIFTIREF <@i8 @i64> %buf2_v4 @I64_1
%buf2_5 = LOAD <@i8> %buf2_v5
%trap = TRAP <@void> KEEPALIVE (%fp %rv %buf2_v0_p %buf2_0 %buf2_1 %buf2_2 %buf2_3 %buf2_4 %buf2_5)
COMMINST @uvm.thread_exit
}