Commit 39945140 authored by qinsoon

renaming MuFunction to MuFunctionVersion

parent 933b1e25
......@@ -18,7 +18,7 @@ pub type Address = usize; // TODO: replace this with Address(usize)
pub type OpIndex = usize;
#[derive(Debug)]
pub struct MuFunction {
pub struct MuFunctionVersion {
pub fn_name: MuTag,
pub next_id: MuID,
......@@ -32,9 +32,9 @@ pub struct MuFunction {
pub const RESERVED_NODE_IDS_FOR_MACHINE : usize = 100;
impl MuFunction {
pub fn new(fn_name: MuTag, sig: P<MuFuncSig>) -> MuFunction {
MuFunction{
impl MuFunctionVersion {
pub fn new(fn_name: MuTag, sig: P<MuFuncSig>) -> MuFunctionVersion {
MuFunctionVersion{
fn_name: fn_name,
next_id: RESERVED_NODE_IDS_FOR_MACHINE,
sig: sig,
......@@ -317,7 +317,7 @@ pub struct TreeNode {
}
impl TreeNode {
// this is a hack to allow creating TreeNode without using a &mut MuFunction
// this is a hack to allow creating TreeNode without using a &mut MuFunctionVersion
pub fn new_inst(id: MuID, v: Instruction) -> P<TreeNode> {
P(TreeNode{
id: id,
......
......@@ -20,7 +20,7 @@ impl CompilerPass for InstructionSelection {
}
#[allow(unused_variables)]
fn start_function(&mut self, vm_context: &VMContext, func: &mut MuFunction) {
fn start_function(&mut self, vm_context: &VMContext, func: &mut MuFunctionVersion) {
println!("{}", self.name());
}
}
......@@ -37,7 +37,7 @@ impl <'a> InstructionSelection {
// 3. we need to backup/restore all the callee-saved registers
// if any of these assumption breaks, we will need to re-emit the code
#[allow(unused_variables)]
fn instruction_select(&mut self, node: &'a P<TreeNode>, cur_func: &MuFunction) {
fn instruction_select(&mut self, node: &'a P<TreeNode>, cur_func: &MuFunctionVersion) {
trace!("instsel on node {}", node);
match node.v {
......@@ -349,7 +349,7 @@ impl <'a> InstructionSelection {
}
#[allow(unused_variables)]
fn process_dest(&mut self, ops: &Vec<P<TreeNode>>, dest: &Destination, cur_func: &MuFunction) {
fn process_dest(&mut self, ops: &Vec<P<TreeNode>>, dest: &Destination, cur_func: &MuFunctionVersion) {
for i in 0..dest.args.len() {
let ref dest_arg = dest.args[i];
match dest_arg {
......@@ -426,7 +426,7 @@ impl <'a> InstructionSelection {
self.backend.end_block(block_name);
}
fn emit_common_epilogue(&mut self, ret_inst: &Instruction, cur_func: &MuFunction) {
fn emit_common_epilogue(&mut self, ret_inst: &Instruction, cur_func: &MuFunctionVersion) {
// epilogue is not a block (its a few instruction inserted before return)
// FIXME: this may change in the future
......@@ -480,7 +480,7 @@ impl <'a> InstructionSelection {
}
}
fn emit_cmp_res(&mut self, cond: &P<TreeNode>, cur_func: &MuFunction) -> op::CmpOp {
fn emit_cmp_res(&mut self, cond: &P<TreeNode>, cur_func: &MuFunctionVersion) -> op::CmpOp {
match cond.v {
TreeNode_::Instruction(ref inst) => {
let ops = inst.ops.borrow();
......@@ -544,7 +544,7 @@ impl <'a> InstructionSelection {
}
}
fn emit_ireg(&mut self, op: &P<TreeNode>, cur_func: &MuFunction) -> P<Value> {
fn emit_ireg(&mut self, op: &P<TreeNode>, cur_func: &MuFunctionVersion) -> P<Value> {
match op.v {
TreeNode_::Instruction(_) => {
self.instruction_select(op, cur_func);
......@@ -646,7 +646,7 @@ impl <'a> InstructionSelection {
}
}
fn emit_general_move(&mut self, src: &P<TreeNode>, dest: &P<Value>, cur_func: &MuFunction) {
fn emit_general_move(&mut self, src: &P<TreeNode>, dest: &P<Value>, cur_func: &MuFunctionVersion) {
let ref dst_ty = dest.ty;
if !types::is_fp(dst_ty) && types::is_scalar(dst_ty) {
......@@ -673,7 +673,7 @@ impl CompilerPass for InstructionSelection {
}
#[allow(unused_variables)]
fn start_function(&mut self, vm_context: &VMContext, func: &mut MuFunction) {
fn start_function(&mut self, vm_context: &VMContext, func: &mut MuFunctionVersion) {
debug!("{}", self.name());
self.backend.start_code(func.fn_name);
......@@ -685,7 +685,7 @@ impl CompilerPass for InstructionSelection {
}
#[allow(unused_variables)]
fn visit_function(&mut self, vm_context: &VMContext, func: &mut MuFunction) {
fn visit_function(&mut self, vm_context: &VMContext, func: &mut MuFunctionVersion) {
for block_label in func.block_trace.as_ref().unwrap() {
let block = func.content.as_ref().unwrap().get_block(block_label);
......@@ -709,7 +709,7 @@ impl CompilerPass for InstructionSelection {
}
#[allow(unused_variables)]
fn finish_function(&mut self, vm_context: &VMContext, func: &mut MuFunction) {
fn finish_function(&mut self, vm_context: &VMContext, func: &mut MuFunctionVersion) {
self.backend.print_cur_code();
let mc = self.backend.finish_code();
......
......@@ -21,7 +21,7 @@ impl CompilerPass for CodeEmission {
self.name
}
fn visit_function(&mut self, vm_context: &VMContext, func: &mut MuFunction) {
fn visit_function(&mut self, vm_context: &VMContext, func: &mut MuFunctionVersion) {
use std::io::prelude::*;
use std::fs::File;
use std::fs;
......
......@@ -55,7 +55,7 @@ impl CompilerPass for PeepholeOptimization {
self.name
}
fn visit_function(&mut self, vm_context: &VMContext, func: &mut MuFunction) {
fn visit_function(&mut self, vm_context: &VMContext, func: &mut MuFunctionVersion) {
let compiled_funcs = vm_context.compiled_funcs().read().unwrap();
let mut cf = compiled_funcs.get(func.fn_name).unwrap().borrow_mut();
......
......@@ -262,7 +262,7 @@ pub fn is_machine_reg(reg: MuID) -> bool {
}
// from Tailoring Graph-coloring Register Allocation For Runtime Compilation, Figure 4
pub fn build_chaitin_briggs (cf: &CompiledFunction, func: &MuFunction) -> InterferenceGraph {
pub fn build_chaitin_briggs (cf: &CompiledFunction, func: &MuFunctionVersion) -> InterferenceGraph {
let mut ig = InterferenceGraph::new();
// precolor machine register nodes
......@@ -366,7 +366,7 @@ pub fn build_chaitin_briggs (cf: &CompiledFunction, func: &MuFunction) -> Interf
// from tony's code src/RegAlloc/Liveness.java
// this function is no longer used
#[allow(dead_code)]
pub fn build (cf: &CompiledFunction, func: &MuFunction) -> InterferenceGraph {
pub fn build (cf: &CompiledFunction, func: &MuFunctionVersion) -> InterferenceGraph {
let mut ig = InterferenceGraph::new();
// precolor machine register nodes
......
......@@ -23,7 +23,7 @@ impl RegisterAllocation {
#[allow(unused_variables)]
// returns true if we spill registers (which requires another instruction selection)
fn coloring(&mut self, vm_context: &VMContext, func: &mut MuFunction) -> bool {
fn coloring(&mut self, vm_context: &VMContext, func: &mut MuFunctionVersion) -> bool {
let compiled_funcs = vm_context.compiled_funcs().read().unwrap();
let mut cf = compiled_funcs.get(func.fn_name).unwrap().borrow_mut();
......@@ -72,7 +72,7 @@ impl CompilerPass for RegisterAllocation {
self.name
}
fn execute(&mut self, vm_context: &VMContext, func: &mut MuFunction) -> PassExecutionResult {
fn execute(&mut self, vm_context: &VMContext, func: &mut MuFunctionVersion) -> PassExecutionResult {
debug!("---CompilerPass {} for {}---", self.name(), func.fn_name);
if self.coloring(vm_context, func) {
......
......@@ -33,7 +33,7 @@ impl Compiler {
}
}
pub fn compile(&self, func: &mut MuFunction) {
pub fn compile(&self, func: &mut MuFunctionVersion) {
let _p = hprof::enter(func.fn_name);
let mut cur_pass = 0;
......
......@@ -23,7 +23,7 @@ fn check_edge_kind(target: MuTag, stack: &Vec<MuTag>) -> EdgeKind {
}
}
fn new_edge(cur: MuTag, edge: BlockEdge, stack: &mut Vec<MuTag>, visited: &mut Vec<MuTag>, func: &mut MuFunction) {
fn new_edge(cur: MuTag, edge: BlockEdge, stack: &mut Vec<MuTag>, visited: &mut Vec<MuTag>, func: &mut MuFunctionVersion) {
// add current block to target's predecessors
{
let target = func.content.as_mut().unwrap().get_block_mut(edge.target);
......@@ -47,7 +47,7 @@ const WATCHPOINT_DISABLED_CHANCE : f32 = 0.9f32;
const NORMAL_RESUME_CHANCE : f32 = 0.6f32;
const EXN_RESUME_CHANCE : f32 = 1f32 - NORMAL_RESUME_CHANCE;
fn dfs(cur: MuTag, stack: &mut Vec<MuTag>, visited: &mut Vec<MuTag>, func: &mut MuFunction) {
fn dfs(cur: MuTag, stack: &mut Vec<MuTag>, visited: &mut Vec<MuTag>, func: &mut MuFunctionVersion) {
trace!("dfs visiting block {}", cur);
trace!("current stack: {:?}", stack);
trace!("current visited: {:?}", visited);
......@@ -196,7 +196,7 @@ impl CompilerPass for ControlFlowAnalysis {
}
#[allow(unused_variables)]
fn visit_function(&mut self, vm_context: &VMContext, func: &mut MuFunction) {
fn visit_function(&mut self, vm_context: &VMContext, func: &mut MuFunctionVersion) {
let mut stack : Vec<MuTag> = vec![];
let mut visited : Vec<MuTag> = vec![];
......@@ -204,7 +204,7 @@ impl CompilerPass for ControlFlowAnalysis {
}
#[allow(unused_variables)]
fn finish_function(&mut self, vm_context: &VMContext, func: &mut MuFunction) {
fn finish_function(&mut self, vm_context: &VMContext, func: &mut MuFunctionVersion) {
debug!("check control flow for {}", func.fn_name);
for entry in func.content.as_ref().unwrap().blocks.iter() {
......
......@@ -63,7 +63,7 @@ impl CompilerPass for DefUse {
}
#[allow(unused_variables)]
fn finish_function(&mut self, vm_context: &VMContext, func: &mut MuFunction) {
fn finish_function(&mut self, vm_context: &VMContext, func: &mut MuFunctionVersion) {
debug!("check use count for variables");
for entry in func.context.values.values() {
......
......@@ -29,7 +29,7 @@ pub enum PassExecutionResult {
pub trait CompilerPass {
fn name(&self) -> &'static str;
fn execute(&mut self, vm_context: &VMContext, func: &mut MuFunction) -> PassExecutionResult {
fn execute(&mut self, vm_context: &VMContext, func: &mut MuFunctionVersion) -> PassExecutionResult {
debug!("---CompilerPass {} for {}---", self.name(), func.fn_name);
self.start_function(vm_context, func);
......@@ -41,7 +41,7 @@ pub trait CompilerPass {
PassExecutionResult::ProceedToNext
}
fn visit_function(&mut self, vm_context: &VMContext, func: &mut MuFunction) {
fn visit_function(&mut self, vm_context: &VMContext, func: &mut MuFunctionVersion) {
for (label, ref mut block) in func.content.as_mut().unwrap().blocks.iter_mut() {
debug!("block: {}", label);
......@@ -59,8 +59,8 @@ pub trait CompilerPass {
}
}
fn start_function(&mut self, vm_context: &VMContext, func: &mut MuFunction) {}
fn finish_function(&mut self, vm_context: &VMContext, func: &mut MuFunction) {}
fn start_function(&mut self, vm_context: &VMContext, func: &mut MuFunctionVersion) {}
fn finish_function(&mut self, vm_context: &VMContext, func: &mut MuFunctionVersion) {}
fn start_block(&mut self, vm_context: &VMContext, func_context: &mut FunctionContext, block: &mut Block) {}
fn finish_block(&mut self, vm_context: &VMContext, func_context: &mut FunctionContext, block: &mut Block) {}
......
......@@ -18,7 +18,7 @@ impl CompilerPass for TraceGen {
}
#[allow(unused_variables)]
fn visit_function(&mut self, vm_context: &VMContext, func: &mut MuFunction) {
fn visit_function(&mut self, vm_context: &VMContext, func: &mut MuFunctionVersion) {
// we put the high probability edge into a hot trace, and others into cold paths
// and traverse cold_path later
let trace = {
......@@ -69,7 +69,7 @@ impl CompilerPass for TraceGen {
}
#[allow(unused_variables)]
fn finish_function(&mut self, vm_context: &VMContext, func: &mut MuFunction) {
fn finish_function(&mut self, vm_context: &VMContext, func: &mut MuFunctionVersion) {
debug!("trace for {}", func.fn_name);
debug!("{:?}", func.block_trace.as_ref().unwrap());
}
......
......@@ -25,7 +25,7 @@ impl CompilerPass for TreeGen {
self.name
}
fn execute(&mut self, vm_context: &VMContext, func: &mut MuFunction) -> PassExecutionResult {
fn execute(&mut self, vm_context: &VMContext, func: &mut MuFunctionVersion) -> PassExecutionResult {
debug!("---CompilerPass {} for {}---", self.name(), func.fn_name);
{
......@@ -125,7 +125,7 @@ impl CompilerPass for TreeGen {
}
#[allow(unused_variables)]
fn finish_function(&mut self, vm_context: &VMContext, func: &mut MuFunction) {
fn finish_function(&mut self, vm_context: &VMContext, func: &mut MuFunctionVersion) {
debug!("check depth tree for {}", func.fn_name);
for entry in func.content.as_ref().unwrap().blocks.iter() {
......
......@@ -12,7 +12,7 @@ pub struct VMContext {
constants: RwLock<HashMap<MuTag, P<Value>>>,
types: RwLock<HashMap<MuTag, P<MuType>>>,
func_sigs: RwLock<HashMap<MuTag, P<MuFuncSig>>>,
funcs: RwLock<HashMap<MuTag, RefCell<MuFunction>>>,
funcs: RwLock<HashMap<MuTag, RefCell<MuFunctionVersion>>>,
compiled_funcs: RwLock<HashMap<MuTag, RefCell<CompiledFunction>>>
}
......@@ -57,7 +57,7 @@ impl <'a> VMContext {
ret
}
pub fn declare_func (&self, func: MuFunction) {
pub fn declare_func (&self, func: MuFunctionVersion) {
let mut funcs = self.funcs.write().unwrap();
debug_assert!(!funcs.contains_key(func.fn_name));
......@@ -70,7 +70,7 @@ impl <'a> VMContext {
self.compiled_funcs.write().unwrap().insert(func.fn_name, RefCell::new(func));
}
pub fn funcs(&self) -> &RwLock<HashMap<MuTag, RefCell<MuFunction>>> {
pub fn funcs(&self) -> &RwLock<HashMap<MuTag, RefCell<MuFunctionVersion>>> {
&self.funcs
}
......
......@@ -38,7 +38,7 @@ pub fn sum() -> VMContext {
let sum_sig = vm.declare_func_sig("sum_sig", vec![type_def_int64.clone()], vec![type_def_int64.clone()]);
// .funcdef @sum VERSION @sum_v1 <@sum_sig>
let mut func = MuFunction::new("sum", sum_sig.clone());
let mut func = MuFunctionVersion::new("sum", sum_sig.clone());
// %entry(<@int_64> %n):
let mut blk_entry = Block::new("entry");
......@@ -181,7 +181,7 @@ pub fn factorial() -> VMContext {
// .funcdef @fac VERSION @fac_v1 <@fac_sig>
let const_func_fac = vm.declare_const("fac", type_def_funcref_fac, Constant::FuncRef("fac"));
let mut func = MuFunction::new("fac", fac_sig.clone());
let mut func = MuFunctionVersion::new("fac", fac_sig.clone());
// %blk_0(<@int_64> %n_3):
let mut blk_0 = Block::new("blk_0");
......
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment