Commit 7e002594 authored by Kunshan Wang

Fixing interpreter...

parent 6024297e
@@ -214,6 +214,7 @@ class UIRTextReader(val idFactory: IDFactory) {
def needInt[T <: Type](tc: TypeContext) = needType(tc, classOf[TypeInt], "int")
def needStruct[T <: Type](tc: TypeContext) = needType(tc, classOf[TypeStruct], "struct")
def needAbsStruct[T <: Type](tc: TypeContext) = needType(tc, classOf[AbstractStructType], "struct or hybrid")
def needArray[T <: Type](tc: TypeContext) = needType(tc, classOf[TypeArray], "array")
def needVector[T <: Type](tc: TypeContext) = needType(tc, classOf[TypeVector], "vector")
def needHybrid[T <: Type](tc: TypeContext) = needType(tc, classOf[TypeHybrid], "hybrid")
@@ -573,7 +574,7 @@ class UIRTextReader(val idFactory: IDFactory) {
i.opnd = ii.opnd
}
case ii: InstGetFieldIRefContext =>
InstGetFieldIRef(ii.ptr != null, needStruct(ii.refTy), ii.intLiteral.intValue, null).later(phase4) { i =>
InstGetFieldIRef(ii.ptr != null, needAbsStruct(ii.refTy), ii.intLiteral.intValue, null).later(phase4) { i =>
i.opnd = ii.opnd
}
case ii: InstGetElemIRefContext =>
......
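GETFIELDIREF is now parsed with needAbsStruct rather than needStruct, because its referent may be either a struct or a hybrid now that hybrids carry their own list of fixed fields. A minimal, hypothetical sketch of the shared bound (field types reduced to strings; not the refimpl's actual classes):

sealed trait AbstractStructType { def fieldTys: Seq[String] }
final case class TypeStruct(fieldTys: Seq[String]) extends AbstractStructType
final case class TypeHybrid(fieldTys: Seq[String], varTy: String) extends AbstractStructType

// A GETFIELDIREF-style field lookup works uniformly on both kinds.
def fieldTy(t: AbstractStructType, index: Int): String = t.fieldTys(index)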
@@ -251,20 +251,9 @@ class ClientAgent(mutator: Mutator)(
newHandle(nt, nb)
}
def getFixedPartIRef(handle: Handle): Handle = {
val t = handle.ty.asInstanceOf[TypeIRef]
val ht = t.ty.asInstanceOf[TypeHybrid]
val ft = ht.fixedTy
val nt = InternalTypePool.irefOf(ft)
val ob = handle.vb.asInstanceOf[BoxIRef]
val nb = ob
newHandle(nt, nb)
}
def getVarPartIRef(handle: Handle): Handle = {
val t = handle.ty.asInstanceOf[TypeIRef]
val ht = t.ty.asInstanceOf[TypeHybrid]
val ft = ht.fixedTy
val vt = ht.varTy
val nt = InternalTypePool.irefOf(vt)
val ob = handle.vb.asInstanceOf[BoxIRef]
......
@@ -31,7 +31,7 @@ object InternalTypes {
val VOID = TypeVoid() := internal("void")
val BYTE = TypeInt(8) := internal("byte")
val BYTE_ARRAY = TypeHybrid(VOID, BYTE) := internal("byte_array")
val BYTE_ARRAY = TypeHybrid(Seq(), BYTE) := internal("byte_array")
val REF_VOID = TypeRef(VOID) := internal("ref_void")
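The BYTE_ARRAY definition shows the new constructor shape: TypeHybrid now takes a sequence of fixed-field types instead of a single fixed type, an empty sequence standing in for what used to be a void fixed part. A hedged, self-contained sketch (types reduced to strings for brevity):

final case class TypeHybrid(fieldTys: Seq[String], varTy: String) // assumed shape, illustration only

val byteArray  = TypeHybrid(Seq(), "int<8>")                     // no fixed fields, int<8> var part
val withHeader = TypeHybrid(Seq("int<32>", "int<64>"), "int<8>") // two fixed fields before the var part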
@@ -49,94 +49,105 @@ object InternalTypePool {
val vecOf = LazyPool[(Type, Long), TypeVector] { case (t, l) => TypeVector(t, l) }
def unmarkedOf(t: Type): Type = t match {
case TypeWeakRef(r) => refOf(r)
case _ => t
case _ => t
}
}
object TypeInferer {
import InternalTypes._
import InternalTypePool._
def ptrOrIRefOf(ptr: Boolean, ty: Type): Type = {
if (ptr) ptrOf(ty) else irefOf(ty)
}
def inferType(v: SSAVariable): Type = v match {
case c: Constant => c.constTy
case c: Constant => c.constTy
case g: GlobalCell => irefOf(g.cellTy)
case f: Function => funcOf(f.sig)
case p: NorParam => p.ty
case p: ExcParam => REF_VOID
case i: InstBinOp => i.opndTy
case f: Function => funcOf(f.sig)
case p: NorParam => p.ty
case p: ExcParam => REF_VOID
case r: InstResult => {
val resTys = inferInstResultTypes(r.inst)
try {
resTys(r.index)
} catch {
case e: ArrayIndexOutOfBoundsException => throw new UvmRefImplException(
s"Instruction ${r.inst} produces only ${resTys.size} results, but result index ${r.index} is requested")
}
}
}
def inferInstResultTypes(inst: Instruction): Seq[Type] = inst match {
case i: InstBinOp => Seq(i.opndTy)
case i: InstCmp => i.opndTy match {
case TypeVector(_, l) => vecOf(I1, l)
case _ => I1
case TypeVector(_, l) => Seq(vecOf(I1, l))
case _ => Seq(I1)
}
case i: InstConv => i.toTy
case i: InstSelect => i.opndTy
case i: InstBranch => VOID
case i: InstBranch2 => VOID
case i: InstSwitch => VOID
case i: InstCall => i.sig.retTy
case i: InstTailCall => VOID
case i: InstRet => VOID
case i: InstThrow => VOID
case i: InstExtractValue => i.strTy.fieldTys(i.index)
case i: InstInsertValue => i.strTy
case i: InstExtractElement => i.seqTy.elemTy
case i: InstInsertElement => i.seqTy
case i: InstShuffleVector => vecOf((i.vecTy.elemTy, i.maskTy.len))
case i: InstNew => refOf(i.allocTy)
case i: InstNewHybrid => refOf(i.allocTy)
case i: InstAlloca => irefOf(i.allocTy)
case i: InstAllocaHybrid => irefOf(i.allocTy)
case i: InstGetIRef => irefOf(i.referentTy)
case i: InstGetFieldIRef => ptrOrIRefOf(i.ptr, i.referentTy.fieldTys(i.index))
case i: InstGetElemIRef => ptrOrIRefOf(i.ptr, i.referentTy.elemTy)
case i: InstShiftIRef => ptrOrIRefOf(i.ptr, i.referentTy)
case i: InstGetFixedPartIRef => ptrOrIRefOf(i.ptr, i.referentTy.fixedTy)
case i: InstGetVarPartIRef => ptrOrIRefOf(i.ptr, i.referentTy.varTy)
case i: InstLoad => unmarkedOf(i.referentTy)
case i: InstStore => VOID
case i: InstCmpXchg => unmarkedOf(i.referentTy)
case i: InstAtomicRMW => unmarkedOf(i.referentTy)
case i: InstFence => VOID
case i: InstTrap => i.retTy
case i: InstWatchPoint => i.retTy
case i: InstCCall => i.sig.retTy
case i: InstNewStack => STACK
case i: InstConv => Seq(i.toTy)
case i: InstSelect => Seq(i.opndTy)
case i: InstBranch => Seq()
case i: InstBranch2 => Seq()
case i: InstSwitch => Seq()
case i: InstCall => i.sig.retTys
case i: InstTailCall => Seq()
case i: InstRet => Seq()
case i: InstThrow => Seq()
case i: InstExtractValue => Seq(i.strTy.fieldTys(i.index))
case i: InstInsertValue => Seq(i.strTy)
case i: InstExtractElement => Seq(i.seqTy.elemTy)
case i: InstInsertElement => Seq(i.seqTy)
case i: InstShuffleVector => Seq(vecOf((i.vecTy.elemTy, i.maskTy.len)))
case i: InstNew => Seq(refOf(i.allocTy))
case i: InstNewHybrid => Seq(refOf(i.allocTy))
case i: InstAlloca => Seq(irefOf(i.allocTy))
case i: InstAllocaHybrid => Seq(irefOf(i.allocTy))
case i: InstGetIRef => Seq(irefOf(i.referentTy))
case i: InstGetFieldIRef => Seq(ptrOrIRefOf(i.ptr, i.referentTy.fieldTys(i.index)))
case i: InstGetElemIRef => Seq(ptrOrIRefOf(i.ptr, i.referentTy.elemTy))
case i: InstShiftIRef => Seq(ptrOrIRefOf(i.ptr, i.referentTy))
case i: InstGetVarPartIRef => Seq(ptrOrIRefOf(i.ptr, i.referentTy.varTy))
case i: InstLoad => Seq(unmarkedOf(i.referentTy))
case i: InstStore => Seq()
case i: InstCmpXchg => Seq(unmarkedOf(i.referentTy), I1)
case i: InstAtomicRMW => Seq(unmarkedOf(i.referentTy))
case i: InstFence => Seq()
case i: InstTrap => i.retTys
case i: InstWatchPoint => i.retTys
case i: InstCCall => i.sig.retTys
case i: InstNewThread => Seq(THREAD)
case i: InstSwapStack => i.curStackAction match {
case RetWith(t) => t
case _: KillOld => VOID
case _: KillOld => Seq()
}
case i: InstCommInst => i.inst.name.get match {
case "@uvm.new_thread" => THREAD
case "@uvm.kill_stack" => VOID
case "@uvm.thread_exit" => VOID
case "@uvm.current_stack" => STACK
case "@uvm.tr64.is_fp" => I1
case "@uvm.tr64.is_int" => I1
case "@uvm.tr64.is_ref" => I1
case "@uvm.tr64.from_fp" => TAGREF64
case "@uvm.tr64.from_int" => TAGREF64
case "@uvm.tr64.from_ref" => TAGREF64
case "@uvm.tr64.to_fp" => DOUBLE
case "@uvm.tr64.to_int" => I52
case "@uvm.tr64.to_ref" => REF_VOID
case "@uvm.tr64.to_tag" => I6
case "@uvm.futex.wait" => I32
case "@uvm.futex.wait_timeout" => I32
case "@uvm.futex.wake" => I32
case "@uvm.futex.cmp_requeue" => I32
case "@uvm.kill_dependency" => i.typeList(0)
case "@uvm.native.pin" => i.typeList(0) match{
case TypeRef(t) => ptrOf(t)
case TypeIRef(t) => ptrOf(t)
case "@uvm.new_stack" => Seq(STACK)
case "@uvm.kill_stack" => Seq()
case "@uvm.thread_exit" => Seq()
case "@uvm.current_stack" => Seq(STACK)
case "@uvm.tr64.is_fp" => Seq(I1)
case "@uvm.tr64.is_int" => Seq(I1)
case "@uvm.tr64.is_ref" => Seq(I1)
case "@uvm.tr64.from_fp" => Seq(TAGREF64)
case "@uvm.tr64.from_int" => Seq(TAGREF64)
case "@uvm.tr64.from_ref" => Seq(TAGREF64)
case "@uvm.tr64.to_fp" => Seq(DOUBLE)
case "@uvm.tr64.to_int" => Seq(I52)
case "@uvm.tr64.to_ref" => Seq(REF_VOID)
case "@uvm.tr64.to_tag" => Seq(I6)
case "@uvm.futex.wait" => Seq(I32)
case "@uvm.futex.wait_timeout" => Seq(I32)
case "@uvm.futex.wake" => Seq(I32)
case "@uvm.futex.cmp_requeue" => Seq(I32)
case "@uvm.kill_dependency" => Seq(i.typeList(0))
case "@uvm.native.pin" => i.typeList(0) match {
case TypeRef(t) => Seq(ptrOf(t))
case TypeIRef(t) => Seq(ptrOf(t))
}
case "@uvm.native.unpin" => VOID
case "@uvm.native.expose" => funcPtrOf(i.funcSigList(0))
case "@uvm.native.unexpose" => VOID
case "@uvm.native.get_cookie" => I64
case "@uvm.native.unpin" => Seq()
case "@uvm.native.expose" => Seq(funcPtrOf(i.funcSigList(0)))
case "@uvm.native.unexpose" => Seq()
case "@uvm.native.get_cookie" => Seq(I64)
}
}
}
\ No newline at end of file
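inferInstResultTypes now returns one type per result, so value-less instructions yield an empty sequence and CMPXCHG yields two results (the loaded value plus an i1 success flag). A self-contained sketch of the pattern with made-up instruction types, not the refimpl's real ones:

sealed trait Ty
case object I1  extends Ty
case object I64 extends Ty

sealed trait Inst
final case class BinOp(opndTy: Ty)  extends Inst // one result
final case class CmpXchg(valTy: Ty) extends Inst // old value plus success bit
case object Fence                   extends Inst // no result at all

def resultTys(i: Inst): Seq[Ty] = i match {
  case BinOp(t)   => Seq(t)
  case CmpXchg(t) => Seq(t, I1)
  case Fence      => Seq()
}
// resultTys(CmpXchg(I64)) == Seq(I64, I1); resultTys(Fence) == Seq()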
@@ -23,17 +23,14 @@ class ConstantPool(implicit microVM: MicroVM) {
case ConstInt(ty, num) => BoxInt(OpHelper.unprepare(num, ty.asInstanceOf[TypeInt].length))
case ConstFloat(ty, num) => BoxFloat(num)
case ConstDouble(ty, num) => BoxDouble(num)
case ConstStruct(ty, flds) => BoxStruct(flds.map(maybeMakeBox))
case ConstSeq(ty, elems) => BoxSeq(elems.map(maybeMakeBox))
case ConstNull(ty) => ty match {
case _:TypeVoid => BoxVoid()
case _:TypeRef => BoxRef(0L)
case _:TypeIRef => BoxIRef(0L, 0L)
case _:TypeFuncRef => BoxFunc(None)
case _:TypeThreadRef => BoxThread(None)
case _:TypeStackRef => BoxStack(None)
}
case ConstSeq(ty, elems) => BoxSeq(elems.map(maybeMakeBox))
case ConstPointer(ty, addr) => BoxPointer(addr)
case gc:GlobalCell => BoxIRef(0L, microVM.memoryManager.globalMemory.addrForGlobalCell(gc))
case f:Function => BoxFunc(Some(f))
case ef:ExposedFunc => BoxPointer(microVM.nativeCallHelper.getStaticExpFuncAddr(ef))
......
@@ -15,8 +15,6 @@ import uvm.refimpl.nat.NativeCallResult
object InterpreterThread {
val logger = Logger(LoggerFactory.getLogger(getClass.getName))
val BOX_VOID = new BoxVoid()
}
class InterpreterThread(val id: Int, initialStack: InterpreterStack, val mutator: Mutator)(
@@ -1369,7 +1367,7 @@ class InterpreterThread(val id: Int, initialStack: InterpreterStack, val mutator
}
/** Rebind to a stack and pass a value. */
private def rebindPassValue(newStack: InterpreterStack, value: ValueBox): Unit = {
private def rebindPassValues(newStack: InterpreterStack, values: Seq[ValueBox]): Unit = {
val oldState = rebind(newStack)
top match {
@@ -1377,12 +1375,14 @@ class InterpreterThread(val id: Int, initialStack: InterpreterStack, val mutator
if (mf.justCreated) {
mf.justCreated = false
} else {
try {
boxOf(curInst).copyFrom(value)
} catch {
case e: Exception => {
throw new UvmRuntimeException(ctx + "Error passing value while rebinding. " +
"The new stack is in state %s, the passed value box is a %s.".format(oldState, value.getClass.getName), e)
for ((rb, vb) <- boxesOf(curInst) zip values) {
try {
rb.copyFrom(vb)
} catch {
case e: Exception => {
throw new UvmRuntimeException(ctx + "Error passing value while rebinding. " +
"The new stack is in state %s, the passed value box is a %s.".format(oldState, value.getClass.getName), e)
}
}
}
}
......
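rebindPassValue becomes rebindPassValues: the boxes of the current instruction's results are zipped with the passed values and copied pairwise. A minimal sketch of that pairwise copy (Box is a stand-in for the refimpl's ValueBox):

final class Box(var value: Long = 0L) { def copyFrom(other: Box): Unit = value = other.value }

def passValues(resultBoxes: Seq[Box], passed: Seq[Box]): Unit =
  for ((rb, vb) <- resultBoxes zip passed) rb.copyFrom(vb)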
@@ -100,7 +100,6 @@ object ValueBox {
case TypeStruct(fieldTys) => BoxStruct(fieldTys.map(makeBoxForType))
case TypeArray(elemTy, len) => BoxSeq(Seq.fill(len.toInt)(makeBoxForType(elemTy)))
case _: TypeHybrid => throw new UvmRefImplException("hybrid cannot be an SSA variable type")
case _: TypeVoid => BoxVoid()
case _: TypeFuncRef => BoxFunc(None)
case _: TypeStackRef => BoxStack(None)
case _: TypeThreadRef => BoxThread(None)
......
@@ -215,8 +215,8 @@ class MuFrame(val funcVer: FuncVer, val cookie: Long, prev: Option[InterpreterFr
putBox(p)
}
for (inst <- bb.insts) {
putBox(inst)
for (inst <- bb.insts; res <- inst.results) {
putBox(res)
}
}
}
......
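MuFrame now allocates one value box per instruction result rather than one per instruction. A hypothetical sketch of the new keying:

final case class Result(id: Int)
final case class Inst(results: Seq[Result])
final class Box(var value: Long = 0L)

def allocateBoxes(insts: Seq[Inst]): Map[Result, Box] =
  (for (inst <- insts; res <- inst.results) yield res -> new Box()).toMap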
@@ -90,7 +90,7 @@ object TypeSizes {
}
def hybridSizeOf(ty: TypeHybrid, len: Word): Word = {
val fixedSize = sizeOf(ty.fixedTy)
val fixedSize = structPrefixSizeOf(ty, ty.fieldTys.size)
val varAlign = alignOf(ty.varTy)
val varSize = shiftOffsetOf(ty.varTy, len)
val size = alignUp(fixedSize, varAlign) + varSize
@@ -98,9 +98,9 @@
}
def hybridAlignOf(ty: TypeHybrid, len: Word): Word = {
val fixedAlign = alignOf(ty.fixedTy)
val fieldAligns = ty.fieldTys.map(alignOf)
val varAlign = alignOf(ty.varTy)
val align = Math.max(fixedAlign, varAlign)
val align = fieldAligns.foldLeft(varAlign)(Math.max)
return align
}
@@ -123,10 +123,10 @@ object TypeSizes {
def fixedPartOffsetOf(ty: TypeHybrid): Word = 0L
def varPartOffsetOf(ty: TypeHybrid): Word = {
return alignUp(sizeOf(ty.fixedTy), alignOf(ty.varTy))
return alignUp(structPrefixSizeOf(ty, ty.fieldTys.length), alignOf(ty.varTy))
}
def structPrefixSizeOf(ty: TypeStruct, prefixLen: Int): Word = {
def structPrefixSizeOf(ty: AbstractStructType, prefixLen: Int): Word = {
val sz = ty.fieldTys.take(prefixLen).foldLeft(0L) { (oldSz, nextTy) =>
alignUp(oldSz, alignOf(nextTy)) + sizeOf(nextTy)
}
......
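With multiple fixed fields, a hybrid's fixed part is laid out like a struct prefix and the variable part starts at that prefix rounded up to the var element's alignment. A worked, self-contained illustration with assumed 64-bit sizes and alignments (it mirrors the logic above but is not the refimpl's code):

def alignUp(n: Long, align: Long): Long = (n + align - 1) / align * align

// hybrid<int<32> int<64>, int<8>> with 10 variable elements, assuming
// size/alignment 4/4 for int<32>, 8/8 for int<64> and 1/1 for int<8>:
val fields = Seq((4L, 4L), (8L, 8L))   // (size, align) of the fixed fields
val (varSize, varAlign) = (1L, 1L)

val prefixSize = fields.foldLeft(0L) { case (sz, (fs, fa)) => alignUp(sz, fa) + fs } // 16
val varOffset  = alignUp(prefixSize, varAlign)                                       // 16
val totalSize  = varOffset + 10 * varSize                                            // 26
val totalAlign = (fields.map(_._2) :+ varAlign).max                                  // 8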
@@ -81,19 +81,20 @@ object MemoryDataScanner extends StrictLogging {
}
case t: TypeHybrid => {
val fixedTy = t.fixedTy
val varTy = t.varTy
val fixedSize = TypeSizes.sizeOf(fixedTy)
val fixedAlign = TypeSizes.alignOf(fixedTy)
val varSize = TypeSizes.sizeOf(varTy)
val varAlign = TypeSizes.alignOf(varTy)
var curAddr = iRef
val varLength = HeaderUtils.getVarLength(iRef)
scanField(fixedTy, objRef, curAddr, handler)
curAddr = TypeSizes.alignUp(curAddr + fixedSize, fixedAlign)
var curAddr = iRef
for (fieldTy <- t.fieldTys) {
val fieldAlign = TypeSizes.alignOf(fieldTy)
curAddr = TypeSizes.alignUp(curAddr, fieldAlign)
scanField(fieldTy, objRef, curAddr, handler)
curAddr += TypeSizes.sizeOf(fieldTy)
}
for (i <- 0L until varLength) {
scanField(varTy, objRef, curAddr, handler)
curAddr = TypeSizes.alignUp(curAddr + varSize, varAlign)
scanField(varTy, objRef, curAddr, handler)
}
}
case t: TypeStackRef => {
......
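The scanner now visits every fixed field of a hybrid at its own aligned offset before scanning the variable part varLength times. A small sketch of the offset computation only (the real code calls scanField at each offset):

def alignUp(n: Long, align: Long): Long = (n + align - 1) / align * align

def fixedFieldOffsets(fields: Seq[(Long, Long)]): Seq[Long] = { // (size, align) pairs
  var cur = 0L
  fields.map { case (size, align) =>
    cur = alignUp(cur, align)
    val off = cur
    cur += size
    off
  }
}
// fixedFieldOffsets(Seq((4L, 4L), (8L, 8L))) == Seq(0L, 8L)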
@@ -35,7 +35,6 @@ class NativeCallHelper {
/** A mapping of Mu types to JFFI types. Cached for struct types. */
val jffiTypePool: LazyPool[MType, JType] = LazyPool {
case TypeVoid() => JType.VOID
case TypeInt(8) => JType.SINT8
case TypeInt(16) => JType.SINT16
case TypeInt(32) => JType.SINT32
@@ -52,11 +51,20 @@ class NativeCallHelper {
case t => throw new UvmRefImplException("Type %s cannot be used in native calls.".format(t.repr))
}
/** Map Mu-style multi-return types to C-style single return type */
def getNativeReturnType(retTys: Seq[MType]): JType = {
retTys match {
case Seq() => JType.VOID
case Seq(t) => jffiTypePool(t)
case ts => throw new UvmRefImplException("Multiple return types %s cannot be used in native calls.".format(ts.map(_.repr).mkString(" ")))
}
}
/** A mapping from referenced C functions (signature, function pointer) to JFFI functions. Cached. */
val jffiFuncPool = LazyPool[(FuncSig, Word), JFunction] {
case (sig, funcAddr) => {
val jParamTypes = sig.paramTy.map(jffiTypePool.apply)
val jRetTy = jffiTypePool(sig.retTy)
val jParamTypes = sig.paramTys.map(jffiTypePool.apply)
val jRetTy = getNativeReturnType(sig.retTys)
new JFunction(funcAddr, jRetTy, jParamTypes: _*)
}
}
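Mu signatures may now declare zero or more return values, but the C side has only zero or one, so getNativeReturnType maps an empty list to void, a single type to its JFFI counterpart, and rejects anything longer. A hedged sketch of just that arity rule (strings instead of JFFI types):

def nativeReturn(retTys: Seq[String]): Option[String] = retTys match {
  case Seq()  => None      // C void
  case Seq(t) => Some(t)   // exactly one C return value
  case ts     => sys.error("multiple return values cannot cross a native call: " + ts.mkString(" "))
}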
@@ -111,7 +119,7 @@ class NativeCallHelper {
val hib = new HeapInvocationBuffer(jFunc)
for ((mty, vb) <- (sig.paramTy zip args)) {
for ((mty, vb) <- (sig.paramTys zip args)) {
putArg(hib, mty, vb)
}
@@ -120,43 +128,45 @@ class NativeCallHelper {
val inv = Invoker.getInstance
sig.retTy match {
case TypeVoid() => {
sig.retTys match {
case Seq() => {
inv.invokeLong(jFunc, hib)
}
case TypeInt(8) => {
val rv = inv.invokeInt(jFunc, hib).toByte
retBox.asInstanceOf[BoxInt].value = OpHelper.trunc(BigInt(rv), 8)
}
case TypeInt(16) => {
val rv = inv.invokeInt(jFunc, hib).toShort
retBox.asInstanceOf[BoxInt].value = OpHelper.trunc(BigInt(rv), 16)
}
case TypeInt(32) => {
val rv = inv.invokeInt(jFunc, hib)
retBox.asInstanceOf[BoxInt].value = OpHelper.trunc(BigInt(rv), 32)
}
case TypeInt(64) => {
val rv = inv.invokeLong(jFunc, hib)
retBox.asInstanceOf[BoxInt].value = OpHelper.trunc(BigInt(rv), 64)
}
case TypeFloat() => {
val rv = inv.invokeFloat(jFunc, hib)
retBox.asInstanceOf[BoxFloat].value = rv
}
case TypeDouble() => {
val rv = inv.invokeDouble(jFunc, hib)
retBox.asInstanceOf[BoxDouble].value = rv
}
case TypeStruct(flds) => {
val rv = inv.invokeStruct(jFunc, hib)
val buf = ByteBuffer.wrap(rv).order(ByteOrder.LITTLE_ENDIAN)
logger.debug("Hexdump:\n" + HexDump.dumpByteBuffer(buf))
getArgFromBuf(buf, 0, sig.retTy, retBox)
}
case _: AbstractPointerType => {
val rv = inv.invokeAddress(jFunc, hib)
retBox.asInstanceOf[BoxPointer].addr = rv
case Seq(t) => t match {
case TypeInt(8) => {
val rv = inv.invokeInt(jFunc, hib).toByte
retBox.asInstanceOf[BoxInt].value = OpHelper.trunc(BigInt(rv), 8)
}
case TypeInt(16) => {
val rv = inv.invokeInt(jFunc, hib).toShort
retBox.asInstanceOf[BoxInt].value = OpHelper.trunc(BigInt(rv), 16)
}
case TypeInt(32) => {
val rv = inv.invokeInt(jFunc, hib)
retBox.asInstanceOf[BoxInt].value = OpHelper.trunc(BigInt(rv), 32)
}
case TypeInt(64) => {
val rv = inv.invokeLong(jFunc, hib)
retBox.asInstanceOf[BoxInt].value = OpHelper.trunc(BigInt(rv), 64)
}
case TypeFloat() => {
val rv = inv.invokeFloat(jFunc, hib)
retBox.asInstanceOf[BoxFloat].value = rv
}
case TypeDouble() => {
val rv = inv.invokeDouble(jFunc, hib)
retBox.asInstanceOf[BoxDouble].value = rv
}
case TypeStruct(flds) => {
val rv = inv.invokeStruct(jFunc, hib)
val buf = ByteBuffer.wrap(rv).order(ByteOrder.LITTLE_ENDIAN)
logger.debug("Hexdump:\n" + HexDump.dumpByteBuffer(buf))
getArgFromBuf(buf, 0, t, retBox)
}
case _: AbstractPointerType => {
val rv = inv.invokeAddress(jFunc, hib)
retBox.asInstanceOf[BoxPointer].addr = rv
}
}
}
currentNativeStackKeeper.remove()
@@ -221,18 +231,18 @@ class NativeCallHelper {
case _: AbstractPointerType => vb.asInstanceOf[BoxPointer].addr = buf.getLong(off)
}
}
def exposeFuncStatic(expFunc: ExposedFunc): Word = {
val efr = exposeFunc(expFunc.func, expFunc.cookie.num.toLong, false)
expFuncToRec(expFunc) = efr
efr.closureHandle.getAddress
}
def exposeFuncDynamic(muFunc: MFunc, cookie: Long): Word = {
val efr = exposeFunc(muFunc, cookie, true)
efr.closureHandle.getAddress
}
/**
* Expose a Mu function.
*
@@ -240,8 +250,8 @@
*/
private def exposeFunc(muFunc: MFunc, cookie: Long, isDynamic: Boolean): ExpFuncRec = {
val sig = muFunc.sig
val jParamTypes = sig.paramTy.map(jffiTypePool.apply)
val jRetTy = jffiTypePool(sig.retTy)
val jParamTypes = sig.paramTys.map(jffiTypePool.apply)
val jRetTy = getNativeReturnType(sig.retTys)
val clos = new MuCallbackClosure(muFunc, cookie)
val handle = NativeSupport.closureManager.newClosure(clos, jRetTy, jParamTypes.toArray, CallingConvention.DEFAULT)
@@ -283,19 +293,25 @@ class NativeCallHelper {
val sig = muFunc.sig
val paramBoxes = for ((paramTy, i) <- sig.paramTy.zipWithIndex) yield {
val paramBoxes = for ((paramTy, i) <- sig.paramTys.zipWithIndex) yield {
makeBoxForParam(buf, paramTy, i)
}
val rvBox = ValueBox.makeBoxForType(sig.retTy)
val maybeRvBox = sig.retTys match {
case Seq() => None
case Seq(t) => Some(ValueBox.makeBoxForType(t))
case ts => throw new UvmRefImplException("Multiple return types %s cannot be used in native calls.".format(ts.map(_.repr).mkString(" ")))
}
logger.debug("Calling to Mu nsk.slave...")
nsk.slave.onCallBack(muFunc, cookie, paramBoxes, rvBox)
nsk.slave.onCallBack(muFunc, cookie, paramBoxes, maybeRvBox)
logger.debug("Back from nsk.slave. Returning to native...")
putRvToBuf(buf, sig.retTy, rvBox)
maybeRvBox.foreach { rvBox =>
putRvToBuf(buf, sig.retTys(0), rvBox)
}
currentNativeStackKeeper.set(nsk)
} catch {
@@ -340,7 +356,6 @@ class NativeCallHelper {
}
def putRvToBuf(buf: Closure.Buffer, ty: MType, vb: ValueBox): Unit = ty match {
case TypeVoid() => // do nothing
case TypeInt(8) => buf.setByteReturn(vb.asInstanceOf[BoxInt].value.toByte)
case TypeInt(16) => buf.setShortReturn(vb.asInstanceOf[BoxInt].value.toShort)
case TypeInt(32) => buf.setIntReturn(vb.asInstanceOf[BoxInt].value.toInt)
......
@@ -31,7 +31,7 @@ trait PoorManAgent[T] {
abstract class NativeCallResult
object NativeCallResult {
case class CallBack(func: MFunc, cookie: Long, args: Seq[ValueBox], retBox: ValueBox) extends NativeCallResult