Commit 559c72dc authored by Kunshan Wang

Tested parser.

parent 3323a4b7
@@ -2,8 +2,9 @@ package uvm
import uvm.types._
import uvm.ssavariables._
import scala.collection.mutable.HashMap
class Bundle {
abstract class Bundle {
/*
* There is a hierarchy of namespaces. A subnode is a subset of the parent.
*
@@ -35,7 +36,30 @@ class Bundle {
val globalCellNs = globalVarNs.makeSubSpace[GlobalCell]()
val funcNs = globalVarNs.makeSubSpace[Function]()
val expFuncNs = globalVarNs.makeSubSpace[ExposedFunc]()
}
/**
This kind of bundle is generated when parsing a .uir file.
<p>
In this kind of bundle, a Function does not yet have a FuncVer attached as its version.
The funcNs contains only the new functions declared in this bundle, not functions
declared previously. When this bundle is merged into the global bundle, funcNs and
funcVerNs are simply merged, and each new FuncVer object becomes the newest version
of its Function, whether that Function is newly declared or not.
*/
class TrantientBundle extends Bundle {
/**
* All functions (declared here or previously) that are defined in this bundle.
* <p>
Mainly for debugging purposes.
*/
//val defFuncNs = new SimpleNamespace[Function]
}
/**
* This kind of bundle holds the global state. Functions and versions are fully merged.
*/
class GlobalBundle extends Bundle {
private def simpleMerge[T <: Identified](oldNs: Namespace[T], newNs: Namespace[T]) {
for (cand <- newNs.all) {
try {
@@ -49,7 +73,13 @@ class Bundle {
}
}
def merge(newBundle: Bundle) {
private def redefineFunctions(newNs: Namespace[FuncVer]) {
for (fv <- newNs.all) {
fv.func.versions = fv :: fv.func.versions
}
}
def merge(newBundle: TrantientBundle) {
// Only merge leaves
simpleMerge(typeNs, newBundle.typeNs)
simpleMerge(funcSigNs, newBundle.funcSigNs)
@@ -58,5 +88,8 @@ class Bundle {
simpleMerge(globalCellNs, newBundle.globalCellNs)
simpleMerge(funcNs, newBundle.funcNs)
simpleMerge(expFuncNs, newBundle.expFuncNs)
redefineFunctions(newBundle.funcVerNs)
}
}
}
\ No newline at end of file
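A minimal sketch of the two-phase loading flow these classes imply, following the parse-then-merge pattern used in the updated tests further down. It assumes UIRTextReader and IDFactory live in uvm.ir.textinput and reuses a fixture path from the tests; it is an illustration, not code from this commit.

import uvm.{GlobalBundle, TrantientBundle}
import uvm.ir.textinput.{IDFactory, UIRTextReader}

object LoadSketch {
  def main(args: Array[String]): Unit = {
    // A single GlobalBundle holds the fully merged state.
    val gb = new GlobalBundle()
    val reader = new UIRTextReader(new IDFactory())

    // Each .uir file is parsed into a TrantientBundle against the current global state...
    val tb: TrantientBundle =
      reader.read(new java.io.FileReader("tests/uvm-parsing-test/types.uir"), gb)

    // ...and then merged in, at which point the new FuncVers become the newest
    // versions of their Functions (see GlobalBundle.redefineFunctions above).
    gb.merge(tb)
  }
}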
@@ -18,12 +18,12 @@ class UIRTextReader(val idFactory: IDFactory) {
import UIRTextReader._
import uvm.ir.textinput.Later.Laterable
def read(ir: String, globalBundle: Bundle): Bundle = {
def read(ir: String, globalBundle: GlobalBundle): TrantientBundle = {
val input = new ANTLRInputStream(ir)
read(ir, input, globalBundle)
}
def read(ir: java.io.Reader, globalBundle: Bundle): Bundle = {
def read(ir: java.io.Reader, globalBundle: GlobalBundle): TrantientBundle = {
val sb = new StringBuilder()
val cb = new Array[Char](4096)
@@ -57,7 +57,7 @@ class UIRTextReader(val idFactory: IDFactory) {
def getMessages(): String = buf.mkString("\n")
}
def read(source: String, ais: ANTLRInputStream, globalBundle: Bundle): Bundle = {
def read(source: String, ais: ANTLRInputStream, globalBundle: GlobalBundle): TrantientBundle = {
val ea = new AccumulativeAntlrErrorListener(source)
val lexer = new UIRLexer(ais)
@@ -144,8 +144,8 @@ class UIRTextReader(val idFactory: IDFactory) {
case e: Exception => throw new TextIRParsingException(inCtx(ctx, s), e)
}
def read(ir: IrContext, globalBundle: Bundle): Bundle = {
val bundle = new Bundle()
def read(ir: IrContext, globalBundle: GlobalBundle): TrantientBundle = {
val bundle = new TrantientBundle()
// Resolve global entities. (If any resXxxx is not present, that's because it is simply not currently used)
@@ -364,6 +364,7 @@ class UIRTextReader(val idFactory: IDFactory) {
}
case fdef: FuncDefContext => {
val func = findOldFunc(fdef.nam).getOrElse(declFunc(fdef.nam, fdef.funcSig))
//bundle.defFuncNs.add(func)
funcDefs += ((func, fdef))
}
case edef: FuncExpDefContext => {
@@ -385,7 +386,7 @@ class UIRTextReader(val idFactory: IDFactory) {
addFuncVer(ver)
ver.func = func
func.versions = ver :: func.versions
//func.versions = ver :: func.versions // Don't override here. Let the MicroVM redefine functions.
def globalizeBB(name: String): String = globalize(name, verName)
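Because the reader no longer prepends the FuncVer here, the newest-version bookkeeping happens only when GlobalBundle.merge calls redefineFunctions. Below is a rough sketch of the resulting ordering guarantee, reusing the redef fixtures referenced (though commented out) in the spec further down; the assertion relies only on versions being prepended newest-first and on the ver.func back-pointer set by the reader.

val gb = new uvm.GlobalBundle()
val reader = new UIRTextReader(new IDFactory())

// Load a base bundle, then an overlay that redefines some of its functions.
gb.merge(reader.read(new java.io.FileReader("tests/uvm-parsing-test/redef-base.uir"), gb))
gb.merge(reader.read(new java.io.FileReader("tests/uvm-parsing-test/redef-overlay.uir"), gb))

for (func <- gb.funcNs.all; ver <- func.versions.headOption) {
  // The head of func.versions is the most recently merged FuncVer of this function,
  // and its back-pointer was set by the reader (ver.func = func).
  assert(ver.func eq func)
}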
@@ -153,7 +153,6 @@ case class KillOld() extends CurStackAction
abstract class NewStackAction
case class PassValue(var argTy: Type, var arg: SSAVariable) extends NewStackAction
case class PassVoid() extends NewStackAction
case class ThrowExc(var exc: SSAVariable) extends NewStackAction
/**
@@ -717,6 +717,6 @@ class TextIRWriterTest extends FlatSpec with Matchers {
println(ir)
val muBundle = new UIRTextReader(new IDFactory).read(ir, new uvm.Bundle())
val muBundle = new UIRTextReader(new IDFactory).read(ir, new uvm.GlobalBundle())
}
}
\ No newline at end of file
@@ -3,25 +3,25 @@ package uvm.ir.textinput
import org.scalatest.FlatSpec
import org.scalatest.Matchers
import uvm.Bundle
import uvm.GlobalBundle
import uvm.TrantientBundle
class NicerErrorMessage extends FlatSpec with Matchers
with TestingBundlesValidators {
def parseFile(fileName: String, globalBundle: Bundle, fac: Option[IDFactory] = None): Bundle = {
def parseFile(fileName: String, globalBundle: GlobalBundle, fac: Option[IDFactory] = None): TrantientBundle = {
val idf = fac.getOrElse(new IDFactory())
val r = new UIRTextReader(idf)
val ir = r.read(new java.io.FileReader(fileName), globalBundle)
ir
}
val EMPTY_BUNDLE = new Bundle()
behavior of "UIRTextReader"
it should "give nice error messages" in {
try {
val b = parseFile("tests/uvm-parsing-test/bundle-with-error.uir", EMPTY_BUNDLE)
val gb = new GlobalBundle()
val b = parseFile("tests/uvm-parsing-test/bundle-with-error.uir", gb)
} catch {
case e: TextIRParsingException => // expected
e.printStackTrace()
@@ -3,48 +3,54 @@ package uvm.ir.textinput
import org.scalatest.FlatSpec
import org.scalatest.Matchers
import uvm.Bundle
import uvm.GlobalBundle
import uvm.TrantientBundle
class UIRTextReaderSpec extends FlatSpec with Matchers
with TestingBundlesValidators {
class UIRTextReaderSpec extends FlatSpec with Matchers
with TestingBundlesValidators {
def parseFile(fileName: String, globalBundle: Bundle, fac: Option[IDFactory] = None): Bundle = {
def parseFile(fileName: String, globalBundle: GlobalBundle, fac: Option[IDFactory] = None): TrantientBundle = {
val idf = fac.getOrElse(new IDFactory())
val r = new UIRTextReader(idf)
val ir = r.read(new java.io.FileReader(fileName), globalBundle)
ir
}
val EMPTY_BUNDLE = new Bundle()
behavior of "UIRTextReader"
def parseFresh(fileName: String): GlobalBundle = {
val gb = new GlobalBundle()
val tb = parseFile(fileName, gb)
gb.merge(tb)
gb
}
it should "read simple type definitions" in {
val b = parseFile("tests/uvm-parsing-test/types.uir", EMPTY_BUNDLE)
val b = parseFresh("tests/uvm-parsing-test/types.uir")
validateTypes(b)
}
it should "read simple constant definitions" in {
val b = parseFile("tests/uvm-parsing-test/constants.uir", EMPTY_BUNDLE)
val b = parseFresh("tests/uvm-parsing-test/constants.uir")
validateConstants(b)
}
it should "read simple function definitions" in {
val b = parseFile("tests/uvm-parsing-test/functions.uir", EMPTY_BUNDLE)
val b = parseFresh("tests/uvm-parsing-test/functions.uir")
validateFunctions(b)
}
it should "read simple instruction definitions" in {
val b = parseFile("tests/uvm-parsing-test/instructions.uir", EMPTY_BUNDLE)
val b = parseFresh("tests/uvm-parsing-test/instructions.uir")
validateInstructions(b)
}
/*
it should "handle loading of multiple bundles" in {
val idf = new IDFactory()
val gb = parseFile("tests/uvm-parsing-test/redef-base.uir", EMPTY_BUNDLE, Some(idf))
val b = parseFile("tests/uvm-parsing-test/redef-overlay.uir", gb, Some(idf))
validateRedef(gb, b)
gb.merge(b)
validateRedefAfterMerge(gb, b)
val gb = new GlobalBundle()
val b1 = parseFile("tests/uvm-parsing-test/redef-base.uir", gb, Some(idf))
gb.merge(b1)
val b2 = parseFile("tests/uvm-parsing-test/redef-overlay.uir", gb, Some(idf))
validateRedef(gb, b1, b2)
gb.merge(b2)
validateRedefAfterMerge(gb, b2)
}
*/
}
\ No newline at end of file
@@ -250,226 +250,227 @@
%tc = TAILCALL <@iii_sig> @callee2 (%p0 %p1)
}
// .typedef @sid = struct <@i64 @double>
// .typedef @rsid = ref<@sid>
// .const @sid1 <@sid> = {@I64_1 @D_1}
//
// .const @v1 <@4xfloat> = VEC {@F_0 @F_0 @F_0 @F_0}
// .const @v2 <@4xfloat> = VEC {@F_1 @F_1 @F_1 @F_1}
//
// .const @I32_4 <@i32> = 4
// .const @I32_6 <@i32> = 6
// .const @vshf <@4xi32> = VEC {@I32_0 @I32_2 @I32_4 @I32_6}
//
// .funcdef @aggregate VERSION %v1 <@npnr_sig> () {
// %entry:
// %e0 = EXTRACTVALUE <@sid 0> @sid1
// %e1 = EXTRACTVALUE <@sid 1> @sid1
// %i0 = INSERTVALUE <@sid 0> @sid1 @I64_0
// %i1 = INSERTVALUE <@sid 1> @sid1 @D_0
// %ee0 = EXTRACTELEMENT <@4xfloat @i32> @v1 @I32_0
// %ie0 = INSERTELEMENT <@4xfloat @i32> @v1 @I32_1 @F_1
// %sv0 = SHUFFLEVECTOR <@4xfloat @4xi32> @v1 @v2 @vshf
//
// RET @VOID
// }
//
// .typedef @al = array <@i64 10>
// .typedef @ral = ref<@al>
// .typedef @hic = hybrid <@i64 @i8>
// .typedef @rhic = ref<@hic>
//
// .funcsig @memops.sig = @void (@i64 @i64)
// .funcdef @memops VERSION %v1 <@memops.sig> (%p0 %p1) {
// %entry:
// %new = NEW <@i64>
// %newhybrid = NEWHYBRID <@hic @i64> %p0
// %alloca = ALLOCA <@i64 >
// %allocahybrid = ALLOCAHYBRID <@hic @i64> %p0
//
// %new_s = NEW <@i64> EXC(%bb2 %handler)
// %bb2:
// %newhybrid_s = NEWHYBRID <@hic @i64> %p0 EXC(%bb3 %handler)
// %bb3:
// %alloca_s = ALLOCA <@i64 > EXC(%bb4 %handler)
// %bb4:
// %allocahybrid_s = ALLOCAHYBRID <@hic @i64> %p0 EXC(%bb5 %handler)
//
// %bb5:
// %new2 = NEW <@sid>
// %alloca2 = ALLOCA <@al>
//
// %getiref = GETIREF <@sid> %new2
//
// %getfieldiref = GETFIELDIREF <@sid 0> %getiref
// %getelemiref = GETELEMIREF <@al @i64> %alloca2 %p1
//
// %getfixedpartiref = GETFIXEDPARTIREF <@hic> %allocahybrid
// %getvarpartiref = GETVARPARTIREF <@hic> %allocahybrid
//
// %shiftiref = SHIFTIREF <@i8 @i64> %getvarpartiref %p1
//
// %load = LOAD <@i64> %alloca
// %store = STORE <@i64> %alloca @I64_42
// %cmpxchg = CMPXCHG SEQ_CST SEQ_CST <@i64> %alloca @I64_42 @I64_0
// %cmpxchg_w = CMPXCHG WEAK SEQ_CST SEQ_CST <@i64> %alloca @I64_42 @I64_0
// %atomicrmw = ATOMICRMW SEQ_CST ADD <@i64> %alloca @I64_43
//
// %load_s = LOAD <@i64> %alloca EXC(%bb6 %handler)
// %bb6:
// %store_s = STORE <@i64> %alloca @I64_42 EXC(%bb7 %handler)
// %bb7:
// %cmpxchg_s = CMPXCHG SEQ_CST SEQ_CST <@i64> %alloca @I64_42 @I64_0 EXC(%bb8 %handler)
// %bb8:
// %atomicrmw_s= ATOMICRMW SEQ_CST ADD <@i64> %alloca @I64_43 EXC(%bb9 %handler)
//
// %bb9:
// %fence = FENCE SEQ_CST
//
// RET @VOID
//
// %handler:
// RET @VOID
// }
//
// .funcsig @memops_ptr.sig = @void (@i64 @i64)
// .funcdef @memops_ptr VERSION %v1 <@memops_ptr.sig> (%p0 %p1) {
// %entry:
// %new = NEW <@i64>
// %newhybrid = NEWHYBRID <@hic @i64> %p0
// %new2 = NEW <@sid>
// %new3 = NEW <@al>
//
// %p = COMMINST @uvm.native.pin <@ri64> (%new)
// %ph = COMMINST @uvm.native.pin <@rhic> (%newhybrid)
// %p2 = COMMINST @uvm.native.pin <@rsid> (%new2)
// %p3 = COMMINST @uvm.native.pin <@ral> (%new3)
//
// %getfieldiref = GETFIELDIREF PTR <@sid 0> %p2
// %getelemiref = GETELEMIREF PTR <@al @i64> %p3 %p1
//
// %getfixedpartiref = GETFIXEDPARTIREF PTR <@hic> %ph
// %getvarpartiref = GETVARPARTIREF PTR <@hic> %ph
//
// %shiftiref = SHIFTIREF PTR <@i8 @i64> %getvarpartiref %p1
//
// %load = LOAD PTR <@i64> %p
// %store = STORE PTR <@i64> %p @I64_42
// %cmpxchg = CMPXCHG PTR SEQ_CST SEQ_CST <@i64> %p @I64_42 @I64_0
// %atomicrmw = ATOMICRMW PTR SEQ_CST ADD <@i64> %p @I64_43
//
// RET @VOID
// }
//
//
// .funcsig @memorder.sig = @void (@ii64)
// .funcdef @memorder VERSION %v1 <@memorder.sig> (%p0) {
// %entry:
// %l0 = LOAD NOT_ATOMIC <@i64> %p0
// %l1 = LOAD RELAXED <@i64> %p0
// %l2 = LOAD CONSUME <@i64> %p0
// %l3 = LOAD ACQUIRE <@i64> %p0
// %s4 = STORE RELEASE <@i64> %p0 @I64_42
// %c5 = CMPXCHG ACQ_REL ACQUIRE <@i64> %p0 @I64_42 @I64_43
// %l6 = LOAD SEQ_CST <@i64> %p0
//
// RET @VOID
// }
//
// .funcsig @atomicrmwops.sig = @void (@ii64 @i64)
// .funcdef @atomicrmwops VERSION %v1 <@atomicrmwops.sig> (%p0 %p1) {
// %entry:
// %old0 = ATOMICRMW ACQ_REL XCHG <@i64> %p0 %p1
// %old1 = ATOMICRMW ACQ_REL ADD <@i64> %p0 %p1
// %old2 = ATOMICRMW ACQ_REL SUB <@i64> %p0 %p1
// %old3 = ATOMICRMW ACQ_REL AND <@i64> %p0 %p1
// %old4 = ATOMICRMW ACQ_REL NAND <@i64> %p0 %p1
// %old5 = ATOMICRMW ACQ_REL OR <@i64> %p0 %p1
// %old6 = ATOMICRMW ACQ_REL XOR <@i64> %p0 %p1
// %old7 = ATOMICRMW ACQ_REL MAX <@i64> %p0 %p1
// %old8 = ATOMICRMW ACQ_REL MIN <@i64> %p0 %p1
// %old9 = ATOMICRMW ACQ_REL UMAX <@i64> %p0 %p1
// %olda = ATOMICRMW ACQ_REL UMIN <@i64> %p0 %p1
//
// RET @VOID
// }
//
// .funcdef @traps VERSION %v1 <@npnr_sig> () {
// %entry:
// %a = ADD <@i64> @I64_42 @I64_43
// %b = SUB <@i64> @I64_42 @I64_43
// %c = MUL <@i64> @I64_42 @I64_43
//
// %tp = TRAP <@i32> KEEPALIVE(%a)
// %tp_s = TRAP <@i64> EXC(%tp_s_cont %tp_s_exc) KEEPALIVE(%b)
//
// %tp_s_cont:
// %wp = WATCHPOINT 1 <@float> %wp_dis_cont %wp_ena_cont KEEPALIVE(%a)
//
// %wp_dis_cont:
// %wp_s = WATCHPOINT 2 <@double> %wp_s_dis_cont %wp_s_ena_cont WPEXC(%wp_s_exc) KEEPALIVE(%b)
//
// %wp_ena_cont:
// RET @VOID
//
// %wp_s_dis_cont:
// RET @VOID
//
// %wp_s_ena_cont:
// RET @VOID
//
// %tp_s_exc:
// %exc = LANDINGPAD
// THROW %exc
//
// %wp_s_exc:
// %exc2 = LANDINGPAD
// THROW %exc2
// }
//
// .funcsig @ccall_callee.sig = @void (@double)
// .typedef @ccall_callee_fp = funcptr<@ccall_callee.sig>
//
// .funcsig @ccall.sig = @void (@ccall_callee_fp)
// .funcdef @ccall VERSION %v1 <@ccall.sig> (%p0) {
// %entry:
// %rv = CCALL #DEFAULT <@ccall_callee_fp @ccall_callee.sig> %p0 (@D_1)
//
// RET @VOID
// }
//
// .funcsig @gen.sig = @void (@stack)
// .funcdef @gen VERSION %v1 <@npnr_sig> (%main) {
// %entry:
// %ss1 = SWAPSTACK %main RET_WITH <@void> PASS_VALUE <@i64> @I64_0
// %ss2 = SWAPSTACK %main KILL_OLD THROW_EXC @NULLREF
// THROW @NULLREF // unreachable
// }
//
// .funcdef @swapstack VERSION %v1 <@npnr_sig> () {
// %entry:
// %curstack = COMMINST @uvm.current_stack
// %coro = NEWSTACK <@iii_sig> @callee2 (%curstack) EXC(%cont %exc)
//
// %cont:
// %ss1 = SWAPSTACK %coro RET_WITH <@i64> PASS_VOID KEEPALIVE(%curstack)
// %ss2 = SWAPSTACK %coro RET_WITH <@i64> PASS_VOID EXC(%nor %exc)
//
// %nor:
// RET @VOID
// %exc:
// RET @VOID
// }
//
// .funcdef @comminst VERSION %v1 <@npnr_sig> () {
// %entry:
// %curstack = COMMINST @uvm.current_stack
// %sta = NEWSTACK <@iii_sig> @callee2 (%curstack)
// %thr = COMMINST @uvm.new_thread (%sta)
//
// %ex = COMMINST @uvm.native.expose [#DEFAULT] <[@npnr_sig]> (@swapstack)
//
// %th_ex = COMMINST @uvm.thread_exit
// RET @VOID
// }
// */
.typedef @sid = struct <@i64 @double>
.typedef @rsid = ref<@sid>
.const @sid1 <@sid> = {@I64_1 @D_1}
.const @v1 <@4xfloat> = {@F_0 @F_0 @F_0 @F_0}
.const @v2 <@4xfloat> = {@F_1 @F_1 @F_1 @F_1}
.const @I32_4 <@i32> = 4
.const @I32_6 <@i32> = 6
.const @vshf <@4xi32> = {@I32_0 @I32_2 @I32_4 @I32_6}
.funcdef @aggregate VERSION %v1 <@npnr_sig> {
%entry():
%e0 = EXTRACTVALUE <@sid 0> @sid1
%e1 = EXTRACTVALUE <@sid 1> @sid1
%i0 = INSERTVALUE <@sid 0> @sid1 @I64_0
%i1 = INSERTVALUE <@sid 1> @sid1 @D_0
%ee0 = EXTRACTELEMENT <@4xfloat @i32> @v1 @I32_0
%ie0 = INSERTELEMENT <@4xfloat @i32> @v1 @I32_1 @F_1
%sv0 = SHUFFLEVECTOR <@4xfloat @4xi32> @v1 @v2 @vshf
RET @VOID
}
.typedef @al = array <@i64 10>
.typedef @ral = ref<@al>
.typedef @hic = hybrid <@i64 @i8>
.typedef @rhic = ref<@hic>
.typedef @irefi64 = iref<@i64>
.typedef @irhic = iref<@hic>
.funcsig @memops.sig = @void (@i64 @i64)
.funcdef @memops VERSION %v1 <@memops.sig> {
%entry(<@i64> %p0 <@i64> %p1):
%new = NEW <@i64>
%newhybrid = NEWHYBRID <@hic @i64> %p0
%alloca = ALLOCA <@i64>
%allocahybrid = ALLOCAHYBRID <@hic @i64> %p0
%new_s = NEW <@i64> EXC(%bb2(%alloca %allocahybrid %p0 %p1) %handler())
%bb2(<@irefi64> %alloca <@irhic> %allocahybrid <@i64> %p0 <@i64> %p1):
%newhybrid_s = NEWHYBRID <@hic @i64> %p0 EXC(%bb3(%alloca %allocahybrid %p0 %p1) %handler())
%bb3(<@irefi64> %alloca <@irhic> %allocahybrid <@i64> %p0 <@i64> %p1):
%alloca_s = ALLOCA <@i64 > EXC(%bb4(%alloca %allocahybrid %p0 %p1) %handler())
%bb4(<@irefi64> %alloca <@irhic> %allocahybrid <@i64> %p0 <@i64> %p1):
%allocahybrid_s = ALLOCAHYBRID <@hic @i64> %p0 EXC(%bb5(%alloca %allocahybrid %p1) %handler())
%bb5(<@irefi64> %alloca <@irhic> %allocahybrid <@i64> %p1):
%new2 = NEW <@sid>
%alloca2 = ALLOCA <@al>
%getiref = GETIREF <@sid> %new2
%getfieldiref = GETFIELDIREF <@sid 0> %getiref
%getelemiref = GETELEMIREF <@al @i64> %alloca2 %p1
%getfixedpartiref = GETFIXEDPARTIREF <@hic> %allocahybrid
%getvarpartiref = GETVARPARTIREF <@hic> %allocahybrid
%shiftiref = SHIFTIREF <@i8 @i64> %getvarpartiref %p1
%load = LOAD <@i64> %alloca
%store = STORE <@i64> %alloca @I64_42
%cmpxchg = CMPXCHG SEQ_CST SEQ_CST <@i64> %alloca @I64_42 @I64_0
%cmpxchg_w = CMPXCHG WEAK SEQ_CST SEQ_CST <@i64> %alloca @I64_42 @I64_0
%atomicrmw = ATOMICRMW SEQ_CST ADD <@i64> %alloca @I64_43
%load_s = LOAD <@i64> %alloca EXC(%bb6(%alloca) %handler())
%bb6(<@irefi64> %alloca ):
%store_s = STORE <@i64> %alloca @I64_42 EXC(%bb7(%alloca) %handler())
%bb7(<@irefi64> %alloca ):
%cmpxchg_s = CMPXCHG SEQ_CST SEQ_CST <@i64> %alloca @I64_42 @I64_0 EXC(%bb8(%alloca) %handler())
%bb8(<@irefi64> %alloca ):
%atomicrmw_s= ATOMICRMW SEQ_CST ADD <@i64> %alloca @I64_43 EXC(%bb9() %handler())
//BRANCH %bb9()
%bb9():
%fence = FENCE SEQ_CST
RET @VOID
%handler() [%exc]:
RET @VOID
}
.funcsig @memops_ptr.sig = @void (@i64 @i64)
.funcdef @memops_ptr VERSION %v1 <@memops_ptr.sig> {
%entry(<@i64> %p0 <@i64> %p1):
%new = NEW <@i64>
%newhybrid = NEWHYBRID <@hic @i64> %p0
%new2 = NEW <@sid>
%new3 = NEW <@al>
%p = COMMINST @uvm.native.pin <@ri64> (%new)
%ph = COMMINST @uvm.native.pin <@rhic> (%newhybrid)
%p2 = COMMINST @uvm.native.pin <@rsid> (%new2)
%p3 = COMMINST @uvm.native.pin <@ral> (%new3)
%getfieldiref = GETFIELDIREF PTR <@sid 0> %p2
%getelemiref = GETELEMIREF PTR <@al @i64> %p3 %p1
%getfixedpartiref = GETFIXEDPARTIREF PTR <@hic> %ph
%getvarpartiref = GETVARPARTIREF PTR <@hic> %ph
%shiftiref = SHIFTIREF PTR <@i8 @i64> %getvarpartiref %p1
%load = LOAD PTR <@i64> %p
%store = STORE PTR <@i64> %p @I64_42
%cmpxchg = CMPXCHG PTR SEQ_CST SEQ_CST <@i64> %p @I64_42 @I64_0
%atomicrmw = ATOMICRMW PTR SEQ_CST ADD <@i64> %p @I64_43
RET @VOID
}
.funcsig @memorder.sig = @void (@ii64)
.funcdef @memorder VERSION %v1 <@memorder.sig> {
%entry(<@ii64> %p0):
%l0 = LOAD NOT_ATOMIC <@i64> %p0
%l1 = LOAD RELAXED <@i64> %p0
%l2 = LOAD CONSUME <@i64> %p0
%l3 = LOAD ACQUIRE <@i64> %p0
%s4 = STORE RELEASE <@i64> %p0 @I64_42
%c5 = CMPXCHG ACQ_REL ACQUIRE <@i64> %p0 @I64_42 @I64_43
%l6 = LOAD SEQ_CST <@i64> %p0
RET @VOID
}
.funcsig @atomicrmwops.sig = @void (@ii64 @i64)
.funcdef @atomicrmwops VERSION %v1 <@atomicrmwops.sig> {
%entry(<@ii64> %p0 <@i64> %p1):
%old0 = ATOMICRMW ACQ_REL XCHG <@i64> %p0 %p1
%old1 = ATOMICRMW ACQ_REL ADD <@i64> %p0 %p1
%old2 = ATOMICRMW ACQ_REL SUB <@i64> %p0 %p1
%old3 = ATOMICRMW ACQ_REL AND <@i64> %p0 %p1
%old4 = ATOMICRMW ACQ_REL NAND <@i64> %p0 %p1
%old5 = ATOMICRMW ACQ_REL OR <@i64> %p0 %p1
%old6 = ATOMICRMW ACQ_REL XOR <@i64> %p0 %p1
%old7 = ATOMICRMW ACQ_REL MAX <@i64> %p0 %p1
%old8 = ATOMICRMW ACQ_REL MIN <@i64> %p0 %p1
%old9 = ATOMICRMW ACQ_REL UMAX <@i64> %p0 %p1
%olda = ATOMICRMW ACQ_REL UMIN <@i64> %p0 %p1
RET @VOID
}
.funcdef @traps VERSION %v1 <@npnr_sig> {