
Commit e7b85485 authored by qinsoon

[wip] need to avoid using &mut func

parent 2d836b52
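For context on the title: this commit moves node-ID allocation off the global counter in `TreeNode` and onto `MuFunction` itself, so every node constructor (`new_ssa`, `new_constant`, and the relocated `new_inst`) now takes `&mut self`, and every pass that builds nodes has to hold `&mut func`. The "[wip]" note presumably refers to wanting to remove that requirement later. A minimal sketch of the shape this forces on callers, using stand-in types (`Func`, `Node`, `some_pass`) rather than the real IR:

```rust
// Stand-ins for MuFunction / TreeNode; the real definitions live in ast::ir.
struct Func { next_id: usize }
struct Node { id: usize }

impl Func {
    // Building a node consumes an ID from the function's own counter,
    // so the constructor needs exclusive (&mut) access to the function.
    fn new_node(&mut self) -> Node {
        let id = self.next_id;
        self.next_id += 1;
        Node { id }
    }
}

fn some_pass(func: &mut Func) -> Node {
    // Every compiler pass that creates nodes now has to hold `&mut func`,
    // which is the requirement the commit title wants to get rid of.
    func.new_node()
}

fn main() {
    let mut f = Func { next_id: 100 };
    assert_eq!(some_pass(&mut f).id, 100);
}
```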
@@ -19,6 +19,9 @@ pub type OpIndex = usize;
#[derive(Debug)]
pub struct MuFunction {
pub fn_name: MuTag,
pub next_id: MuID,
pub sig: P<MuFuncSig>,
pub content: Option<FunctionContent>,
pub context: FunctionContext,
@@ -26,9 +29,23 @@ pub struct MuFunction {
pub block_trace: Option<Vec<MuTag>> // only available after Trace Generation Pass
}
pub const RESERVED_NODE_IDS_FOR_MACHINE : usize = 100;
impl MuFunction {
pub fn new(fn_name: MuTag, sig: P<MuFuncSig>) -> MuFunction {
MuFunction{fn_name: fn_name, sig: sig, content: None, context: FunctionContext::new(), block_trace: None}
MuFunction{
fn_name: fn_name,
next_id: RESERVED_NODE_IDS_FOR_MACHINE,
sig: sig,
content: None,
context: FunctionContext::new(),
block_trace: None}
}
fn get_id(&mut self) -> MuID {
let ret = self.next_id;
self.next_id += 1;
ret
}
pub fn define(&mut self, content: FunctionContent) {
@@ -36,7 +53,7 @@ impl MuFunction {
}
pub fn new_ssa(&mut self, tag: MuTag, ty: P<MuType>) -> P<TreeNode> {
let id = TreeNode::get_id();
let id = self.get_id();
self.context.values.insert(id, ValueEntry{id: id, tag: tag, ty: ty.clone(), use_count: Cell::new(0), expr: None});
@@ -53,11 +70,19 @@ impl MuFunction {
pub fn new_constant(&mut self, v: P<Value>) -> P<TreeNode> {
P(TreeNode{
id: TreeNode::get_id(),
id: self.get_id(),
op: pick_op_code_for_const(&v.ty),
v: TreeNode_::Value(v)
})
}
pub fn new_inst(&mut self, v: Instruction) -> P<TreeNode> {
P(TreeNode{
id: self.get_id(),
op: pick_op_code_for_inst(&v),
v: TreeNode_::Instruction(v),
})
}
}
#[derive(Debug)]
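Two consequences of the new counter are worth spelling out: `get_id` never hands out an ID below `RESERVED_NODE_IDS_FOR_MACHINE`, so that range stays available for machine registers (compare the "init machine registers on the function" note in `start_function` further down), and IDs are now only unique within a single `MuFunction`, whereas the removed `TreeNode::get_id` counter was process-wide. A small illustration using a stand-in counter type (`Ids`), not the real `MuFunction`:

```rust
const RESERVED_NODE_IDS_FOR_MACHINE: usize = 100;

// Stand-in for the per-function counter this commit adds to MuFunction.
struct Ids { next_id: usize }

impl Ids {
    fn new() -> Ids { Ids { next_id: RESERVED_NODE_IDS_FOR_MACHINE } }
    fn get_id(&mut self) -> usize {
        let ret = self.next_id;
        self.next_id += 1;
        ret
    }
}

fn main() {
    let mut f1 = Ids::new();
    let mut f2 = Ids::new();
    // Both "functions" hand out 100 first: an ID only identifies a node
    // within its own function, and 0..100 stays free for machine registers.
    assert_eq!(f1.get_id(), 100);
    assert_eq!(f2.get_id(), 100);
}
```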
@@ -199,24 +224,7 @@ pub struct TreeNode {
pub v: TreeNode_,
}
use std::sync::atomic::{Ordering, AtomicUsize, ATOMIC_USIZE_INIT};
static CUR_ID : AtomicUsize = ATOMIC_USIZE_INIT;
impl TreeNode {
pub fn get_id() -> MuID {
let ret = CUR_ID.load(Ordering::SeqCst);
CUR_ID.store(ret + 1, Ordering::SeqCst);
return ret;
}
pub fn new_inst(v: Instruction) -> P<TreeNode> {
P(TreeNode{
id: TreeNode::get_id(),
op: pick_op_code_for_inst(&v),
v: TreeNode_::Instruction(v),
})
}
pub fn extract_ssa_id(&self) -> Option<MuID> {
match self.v {
TreeNode_::Value(ref pv) => {
......
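As an aside on the removed code: the old `TreeNode::get_id` bumped the global counter with a separate `load` and `store`, which is not a single atomic read-modify-write, so two threads could observe the same value and hand out duplicate IDs. The usual way to write such a counter is `fetch_add` (`ATOMIC_USIZE_INIT` is the older spelling of `AtomicUsize::new(0)`); a sketch of that pattern, noting that this commit instead drops the global counter altogether:

```rust
use std::sync::atomic::{AtomicUsize, Ordering};

// Process-wide counter; fetch_add performs the read-modify-write as one
// atomic operation, unlike a load followed by a separate store.
static CUR_ID: AtomicUsize = AtomicUsize::new(0);

fn get_id() -> usize {
    CUR_ID.fetch_add(1, Ordering::SeqCst)
}

fn main() {
    assert_eq!(get_id(), 0);
    assert_eq!(get_id(), 1);
}
```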
@@ -94,6 +94,22 @@ impl CodeGenerator for ASMCodeGen {
trace!("emit: jbe {}", dest.target);
}
fn emit_jg(&mut self, dest: &Destination) {
trace!("emit: jg {}", dest.target);
}
fn emit_jge(&mut self, dest: &Destination) {
trace!("emit: jge {}", dest.target);
}
fn emit_jl(&mut self, dest: &Destination) {
trace!("emit: jl {}", dest.target);
}
fn emit_jle(&mut self, dest: &Destination) {
trace!("emit: jle {}", dest.target);
}
fn emit_call(&mut self, func: &P<Value>) {
trace!("emit: call {}", func);
}
......
@@ -26,6 +26,10 @@ pub trait CodeGenerator {
fn emit_jae(&mut self, dest: &Destination);
fn emit_jb(&mut self, dest: &Destination);
fn emit_jbe(&mut self, dest: &Destination);
fn emit_jg(&mut self, dest: &Destination);
fn emit_jge(&mut self, dest: &Destination);
fn emit_jl(&mut self, dest: &Destination);
fn emit_jle(&mut self, dest: &Destination);
fn emit_call(&mut self, func: &P<Value>);
fn emit_ret(&mut self);
......
@@ -66,10 +66,14 @@ impl <'a> InstructionSelection {
match self.emit_cmp_res(cond) {
op::CmpOp::EQ => self.backend.emit_je(branch_dest),
op::CmpOp::NE => self.backend.emit_jne(branch_dest),
op::CmpOp::SGE => self.backend.emit_jae(branch_dest),
op::CmpOp::SGT => self.backend.emit_ja(branch_dest),
op::CmpOp::SLE => self.backend.emit_jbe(branch_dest),
op::CmpOp::SLT => self.backend.emit_jb(branch_dest),
op::CmpOp::UGE => self.backend.emit_jae(branch_dest),
op::CmpOp::UGT => self.backend.emit_ja(branch_dest),
op::CmpOp::ULE => self.backend.emit_jbe(branch_dest),
op::CmpOp::ULT => self.backend.emit_jb(branch_dest),
op::CmpOp::SGE => self.backend.emit_jge(branch_dest),
op::CmpOp::SGT => self.backend.emit_jg(branch_dest),
op::CmpOp::SLE => self.backend.emit_jle(branch_dest),
op::CmpOp::SLT => self.backend.emit_jl(branch_dest),
_ => unimplemented!()
}
} else if self.match_ireg(cond) {
@@ -448,6 +452,8 @@ impl CompilerPass for InstructionSelection {
#[allow(unused_variables)]
fn start_function(&mut self, vm_context: &VMContext, func: &mut MuFunction) {
debug!("{}", self.name());
// init machine registers on the function
}
#[allow(unused_variables)]
......
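The conditional-branch change above is the correctness fix in this commit: after a `cmp`, x86-64's `jl`/`jle`/`jg`/`jge` implement the signed ordering (they read SF and OF), while `jb`/`jbe`/`ja`/`jae` read CF and implement the unsigned ordering. Routing SGE/SGT/SLE/SLT through the unsigned jumps goes wrong as soon as the operands differ in sign, because the same bit pattern orders differently under the two interpretations:

```rust
fn main() {
    let a: i64 = -1;
    let b: i64 = 1;

    // Signed ordering: -1 < 1, so an SLT branch should be taken.
    assert!(a < b);

    // The same bits read as unsigned: 0xFFFF_FFFF_FFFF_FFFF > 1, which is
    // the ordering the CF-based jb/ja family would branch on.
    assert!((a as u64) > (b as u64));
}
```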
@@ -9,6 +9,10 @@ pub use compiler::backend::x86_64::asm_backend::ASMCodeGen;
use ast::ptr::P;
use ast::ir::*;
lazy_static! {
}
pub fn is_valid_x86_imm(op: &P<Value>) -> bool {
use std::u32;
match op.v {
......
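The new `is_valid_x86_imm` helper is cut off in this excerpt, but judging from its name and the `use std::u32`, it decides whether a constant operand can be encoded as an x86-64 immediate, which for most instructions means fitting in a 32-bit field. (The empty `lazy_static!` block added in the same hunk presumably reserves a place for machine-register definitions.) A hedged sketch of that kind of check, written against a plain integer rather than the real `P<Value>`, with a made-up name `fits_in_imm32`:

```rust
// Hypothetical stand-alone check: does a constant fit in the 32-bit
// immediate field most x86-64 instructions use (sign-extended at runtime)?
fn fits_in_imm32(v: i64) -> bool {
    v >= i32::MIN as i64 && v <= i32::MAX as i64
}

fn main() {
    assert!(fits_in_imm32(42));
    assert!(fits_in_imm32(-1));
    assert!(!fits_in_imm32(1 << 40));
}
```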
@@ -56,7 +56,7 @@ impl CompilerPass for TreeGen {
let expr = entry_value.expr.take().unwrap();
trace!("{} replaced by {}", ops[index], expr);
ops[index] = TreeNode::new_inst(expr);
ops[index] = func.new_inst(expr);
}
} else {
trace!("{} cant be replaced", ops[index]);
......
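This last hunk is where the new `&mut` requirement shows up in a pass: the tree-generation rewrite is walking operands that belong to `func` while it now also needs `func.new_inst(expr)`, which borrows the whole function mutably. One way to keep the borrow checker happy is to move the body out of the function first (for example with `Option::take`), rewrite it, and hand it back through `define`. The following is a sketch of that pattern with simplified stand-in types (`Func`, `Node`, `tree_gen`), not the pass's actual code:

```rust
// Simplified stand-ins: a "function" owning an optional body of nodes.
struct Node { id: usize }
struct Func {
    next_id: usize,
    content: Option<Vec<Node>>,
}

impl Func {
    fn new_inst(&mut self) -> Node {
        let id = self.next_id;
        self.next_id += 1;
        Node { id }
    }

    fn define(&mut self, content: Vec<Node>) {
        self.content = Some(content);
    }
}

fn tree_gen(func: &mut Func) {
    // Take the body out so iterating it does not keep `func` borrowed...
    let mut body = func.content.take().unwrap();
    for slot in body.iter_mut() {
        // ...which leaves `func` free to be mutably borrowed for new nodes.
        *slot = func.new_inst();
    }
    // Put the rewritten body back.
    func.define(body);
}

fn main() {
    let mut f = Func { next_id: 100, content: Some(vec![Node { id: 0 }]) };
    tree_gen(&mut f);
    assert_eq!(f.content.as_ref().unwrap()[0].id, 100);
}
```

An alternative, which the commit title appears to hint at, would be to allocate IDs from something that does not require exclusive access to the whole function, for example a counter with interior mutability.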