mod.rs

pub mod inst_sel;
pub mod reg_alloc;
pub mod peephole_opt;
pub mod code_emission;

use utils::ByteSize;

pub type Word = usize;
pub const WORD_SIZE : ByteSize = 8;

pub const AOT_EMIT_DIR : &'static str = "emit";
pub const AOT_EMIT_CONTEXT_FILE : &'static str = "context.s";

// these are not the full block names; the prologue/epilogue block names are generated from them
pub const PROLOGUE_BLOCK_NAME: &'static str = "prologue";
pub const EPILOGUE_BLOCK_NAME: &'static str = "epilogue";

// X86_64

#[cfg(target_arch = "x86_64")]
#[path = "arch/x86_64/mod.rs"]
pub mod x86_64;

#[cfg(target_arch = "x86_64")]
pub use compiler::backend::x86_64::init_machine_regs_for_func;

#[cfg(target_arch = "x86_64")]
pub use compiler::backend::x86_64::number_of_regs_in_group;
#[cfg(target_arch = "x86_64")]
pub use compiler::backend::x86_64::number_of_all_regs;
#[cfg(target_arch = "x86_64")]
pub use compiler::backend::x86_64::all_regs;
#[cfg(target_arch = "x86_64")]
pub use compiler::backend::x86_64::all_usable_regs;
#[cfg(target_arch = "x86_64")]
pub use compiler::backend::x86_64::pick_group_for_reg;
#[cfg(target_arch = "x86_64")]
pub use compiler::backend::x86_64::is_callee_saved;
#[cfg(target_arch = "x86_64")]
pub use compiler::backend::x86_64::emit_code;
#[cfg(target_arch = "x86_64")]
pub use compiler::backend::x86_64::emit_context;
#[cfg(target_arch = "x86_64")]
pub use compiler::backend::x86_64::spill_rewrite;

// ARM

#[cfg(target_arch = "arm")]
#[path = "arch/arm/mod.rs"]
mod arm;

// common data structures with target-specific info

use vm::VM;
use ast::types::*;
use ast::ptr::*;
use ast::ir::*;
pub fn resolve_backend_type_info (ty: &MuType, vm: &VM) -> BackendTypeInfo {
    match ty.v {
        // integral
        MuType_::Int(size_in_bit) => {
            match size_in_bit {
                8  => BackendTypeInfo{size: 1, alignment: 1, struct_layout: None},
                16 => BackendTypeInfo{size: 2, alignment: 2, struct_layout: None},
                32 => BackendTypeInfo{size: 4, alignment: 4, struct_layout: None},
                64 => BackendTypeInfo{size: 8, alignment: 8, struct_layout: None},
                _ => unimplemented!()
            }
        },
        // reference and pointer types (all word-sized)
        MuType_::Ref(_)
        | MuType_::IRef(_)
        | MuType_::WeakRef(_)
        | MuType_::UPtr(_)
        | MuType_::FuncRef(_)
        | MuType_::UFuncPtr(_)
        | MuType_::Tagref64
        | MuType_::ThreadRef
        | MuType_::StackRef => BackendTypeInfo{size: 8, alignment: 8, struct_layout: None},
        // floating point
        MuType_::Float => BackendTypeInfo{size: 4, alignment: 4, struct_layout: None},
        MuType_::Double => BackendTypeInfo{size: 8, alignment: 8, struct_layout: None},
        // array
        MuType_::Array(ref ty, len) => {
            let ele_ty = vm.get_backend_type_info(ty.id());
            
            BackendTypeInfo{size: ele_ty.size * len, alignment: ele_ty.alignment, struct_layout: None}
        }
        // struct
        MuType_::Struct(ref name) => {
            let read_lock = STRUCT_TAG_MAP.read().unwrap();
            let struc = read_lock.get(name).unwrap();
            let tys = struc.get_tys();            
            
            trace!("layout struct: {}", struc);
            layout_struct(tys, vm)
        }
        // hybrid
        // - alignment is that of the most strictly aligned element (across the fix tys and the var ty)
        // - size is the size of the fixed part (fix tys) only
        // - layout is the layout of the fixed part (fix tys)
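        // worked example (illustrative, not from the original source): for a hybrid whose fixed
        // fields are int<8> and int<64> and whose var part is double, the fixed part lays out to
        // size 16 / alignment 8; the var part's alignment (8) does not exceed that, so the
        // resulting info is size 16, alignment 8.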
        MuType_::Hybrid(ref fix_tys, ref var_ty) => {
            // treat fix_tys as struct
            let mut ret = layout_struct(fix_tys, vm);
            
            // treat var_ty as array (getting its alignment)
            let var_align = vm.get_backend_type_info(var_ty.id()).alignment;
            
            if ret.alignment < var_align {
                ret.alignment = var_align;
            }
            
            ret
        }
        // void
        MuType_::Void => BackendTypeInfo{size: 0, alignment: 8, struct_layout: None},
        // vector
        MuType_::Vector(_, _) => unimplemented!()
    }
}

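// worked example (illustrative, not from the original source): for fields int<8>, int<64>, int<32>,
// the loop below produces offsets [0, 8, 16] (the int<64> field forces padding after the int<8>),
// a struct alignment of 8, and a final size of 24 once the tail is padded up from 20.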
fn layout_struct(tys: &Vec<P<MuType>>, vm: &VM) -> BackendTypeInfo {
    let mut offsets : Vec<ByteSize> = vec![];
    let mut cur : ByteSize = 0;
    let mut struct_align : ByteSize = 0;
    
    for ty in tys.iter() {
        let ty_info = vm.get_backend_type_info(ty.id());
        trace!("examining field: {}, {:?}", ty, ty_info);
        
        let align = ty_info.alignment;
        if struct_align < align {
            struct_align = align;
        }
        
        if cur % align != 0 {
            // move cursor to next aligned offset
            cur = (cur / align + 1) * align;
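            // e.g. (illustrative) cur = 5, align = 4  ->  (5 / 4 + 1) * 4 = 8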
        }
        
        offsets.push(cur);
        trace!("aligned to {}", cur);
        
        cur += ty_info.size;
    }
    
    // if we need padding at the end
    if cur % struct_align != 0 {
        cur = (cur / struct_align + 1) * struct_align;
    }
    
    BackendTypeInfo {
        size: cur,
        alignment: struct_align,
        struct_layout: Some(offsets)
    }
}

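// convenience wrapper: returns (size, alignment, field offsets) for a sequential (struct-like) layout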
pub fn sequetial_layout(tys: &Vec<P<MuType>>, vm: &VM) -> (ByteSize, ByteSize, Vec<ByteSize>) {
    let ret = layout_struct(tys, vm);
    
    (ret.size, ret.alignment, ret.struct_layout.unwrap())
} 

#[derive(Clone, Debug, RustcEncodable, RustcDecodable)]
pub struct BackendTypeInfo {
    pub size: ByteSize,
    pub alignment: ByteSize,
    pub struct_layout: Option<Vec<ByteSize>>
}

#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, RustcEncodable, RustcDecodable)]
pub enum RegGroup {GPR, FPR}