Commit d9912d25 authored by qinsoon's avatar qinsoon

remove rustc warnings

parent b8187a00
......@@ -1934,7 +1934,7 @@ impl CodeGenerator for ASMCodeGen {
fn emit_shl_mem64_imm8(&mut self, dest: &P<Value>, src: i8) {
trace!("emit shl {},{} -> {}", dest, src, dest);
let (mem, mut uses) = self.prepare_mem(dest, 4 + 1 + 1 + src.to_string().len() + 1);
let (mem, uses) = self.prepare_mem(dest, 4 + 1 + 1 + src.to_string().len() + 1);
let asm = format!("shlq ${},{}", src, mem);
......@@ -2009,7 +2009,7 @@ impl CodeGenerator for ASMCodeGen {
fn emit_shr_mem64_imm8(&mut self, dest: &P<Value>, src: i8) {
trace!("emit shr {},{} -> {}", dest, src, dest);
let (mem, mut uses) = self.prepare_mem(dest, 4 + 1 + 1 + src.to_string().len() + 1);
let (mem, uses) = self.prepare_mem(dest, 4 + 1 + 1 + src.to_string().len() + 1);
let asm = format!("shrq ${},{}", src, mem);
......@@ -2084,7 +2084,7 @@ impl CodeGenerator for ASMCodeGen {
fn emit_sar_mem64_imm8(&mut self, dest: &P<Value>, src: i8) {
trace!("emit sar {},{} -> {}", dest, src, dest);
let (mem, mut uses) = self.prepare_mem(dest, 4 + 1 + 1 + src.to_string().len() + 1);
let (mem, uses) = self.prepare_mem(dest, 4 + 1 + 1 + src.to_string().len() + 1);
let asm = format!("sarq ${},{}", src, mem);
......
......@@ -696,18 +696,30 @@ impl <'a> InstructionSelection {
let tmp_op = self.emit_ireg(op, f_content, f_context, vm);
let tmp_res = self.get_result_value(node);
let mask = match from_ty_len {
8 => 0xFFi32,
16 => 0xFFFFi32,
32 => 0xFFFFFFFFi32,
_ => unimplemented!()
};
// mov op -> result
self.backend.emit_mov_r64_r64(&tmp_res, &tmp_op);
// and mask result -> result
self.backend.emit_and_r64_imm32(&tmp_res, mask);
if from_ty_len < 32 {
let mask = match to_ty_len {
8 => 0xFFi32,
16 => 0xFFFFi32,
_ => unimplemented!()
};
// mov op -> result
self.backend.emit_mov_r64_r64(&tmp_res, &tmp_op);
// and mask result -> result
self.backend.emit_and_r64_imm32(&tmp_res, mask);
} else if from_ty_len == 32 {
let tmp_mask = self.make_temporary(f_context, UINT64_TYPE.clone(), vm);
self.backend.emit_mov_r64_imm64(&tmp_mask, 0xFFFFFFFF as i64);
// mov op -> result
self.backend.emit_mov_r64_r64(&tmp_res, &tmp_op);
// and mask result -> result
self.backend.emit_and_r64_r64(&tmp_res, &tmp_mask);
} else {
unimplemented!()
}
} else {
panic!("unexpected op (expect ireg): {}", op);
}
......@@ -1057,7 +1069,7 @@ impl <'a> InstructionSelection {
// ASM: %tmp_res = call muentry_alloc_large(%allocator, size, align)
let const_align = self.make_value_int_const(align as u64, vm);
let rets = self.emit_runtime_entry(
self.emit_runtime_entry(
&entrypoints::ALLOC_LARGE,
vec![tmp_allocator, size.clone(), const_align],
Some(vec![tmp_res.clone()]),
......@@ -1142,7 +1154,7 @@ impl <'a> InstructionSelection {
// arg3: align
let const_align= self.make_value_int_const(align as u64, vm);
let rets = self.emit_runtime_entry(
self.emit_runtime_entry(
&entrypoints::ALLOC_SLOW,
vec![tmp_allocator, size.clone(), const_align],
Some(vec![
......@@ -1550,6 +1562,7 @@ impl <'a> InstructionSelection {
self.emit_postcall_convention(&sig, &rets, stack_arg_size, f_context, vm)
}
#[allow(unused_variables)] // resumption not implemented
fn emit_c_call_ir(
&mut self,
inst: &Instruction,
......@@ -1593,7 +1606,7 @@ impl <'a> InstructionSelection {
let rets = inst.value.clone();
match pv.v {
Value_::Constant(Constant::Int(addr)) => unimplemented!(),
Value_::Constant(Constant::Int(_)) => unimplemented!(),
Value_::Constant(Constant::ExternSym(ref func_name)) => {
self.emit_c_call_internal(
func_name.clone(), //func_name: CName,
......@@ -2138,6 +2151,7 @@ impl <'a> InstructionSelection {
}
}
#[allow(unused_variables)]
fn addr_append_index_scale(&mut self, mem: MemoryLocation, index: P<Value>, scale: u8, vm: &VM) -> MemoryLocation {
match mem {
MemoryLocation::Address {base, offset, ..} => {
......@@ -2174,7 +2188,7 @@ impl <'a> InstructionSelection {
trace!("MEM from GETIREF: {}", ret);
ret
}
Instruction_::GetFieldIRef{is_ptr, base, index} => {
Instruction_::GetFieldIRef{base, index, ..} => {
let ref base = ops[base];
let struct_ty = {
......@@ -2191,7 +2205,7 @@ impl <'a> InstructionSelection {
match base.v {
// GETFIELDIREF(GETIREF) -> add FIELD_OFFSET to old offset
TreeNode_::Instruction(Instruction{v: Instruction_::GetIRef(op_index), ref ops, ..}) => {
TreeNode_::Instruction(Instruction{v: Instruction_::GetIRef(_), ..}) => {
let mem = self.emit_get_mem_from_inst_inner(base, f_content, f_context, vm);
let ret = self.addr_const_offset_adjust(mem, field_offset as u64, vm);
......@@ -2214,7 +2228,7 @@ impl <'a> InstructionSelection {
}
}
}
Instruction_::GetVarPartIRef{is_ptr, base} => {
Instruction_::GetVarPartIRef{base, ..} => {
let ref base = ops[base];
let struct_ty = match base.clone_value().ty.get_referenced_ty() {
......@@ -2250,10 +2264,9 @@ impl <'a> InstructionSelection {
}
}
}
Instruction_::ShiftIRef{is_ptr, base, offset} => {
Instruction_::ShiftIRef{base, offset, ..} => {
let ref base = ops[base];
let ref offset = ops[offset];
let tmp_res = self.get_result_value(op);
let ref base_ty = base.clone_value().ty;
let ele_ty = match base_ty.get_referenced_ty() {
......
......@@ -2,7 +2,6 @@ extern crate nalgebra;
use compiler::machine_code::CompiledFunction;
use ast::ir::*;
use ast::types;
use compiler::backend;
use utils::vec_utils;
use utils::LinkedHashSet;
......
use log;
use utils;
use ast::ir::*;
use vm::VM;
......
......@@ -273,6 +273,9 @@ impl MuThread {
}
}
#[allow(unused_unsafe)]
// pieces of this function are not safe (we want to mark it unsafe)
// this function is exposed as unsafe because it is not always safe to call it
pub unsafe fn current_thread_as_mu_thread(threadlocal: Address, vm: Arc<VM>) {
use std::usize;
......
......@@ -14,7 +14,7 @@ pub fn compile_run_c_test(test_file_path: &'static str) -> PathBuf {
use std::fs;
let temp = Path::new("tests/test_jit/temp");
fs::create_dir_all(temp);
fs::create_dir_all(temp).unwrap();
let mut ret = temp.to_path_buf();
ret.push(src.file_stem().unwrap());
......
......@@ -21,7 +21,7 @@ pub fn get_test_clang_path() -> String {
}
}
pub fn exec (mut cmd: Command) -> Output {
pub fn exec (cmd: Command) -> Output {
let output = exec_nocheck(cmd);
assert!(output.status.success());
......
......@@ -961,7 +961,7 @@ impl<'lb, 'lvm> BundleLoader<'lb, 'lvm> {
trace!("Filling struct {} {:?}", id, ty);
match **ty {
NodeType::TypeStruct { id: _, fieldtys: ref fieldtys } => {
NodeType::TypeStruct { id: _, ref fieldtys } => {
let fieldtys_impl = fieldtys.iter().map(|fid| {
self.ensure_type_rec(*fid)
}).collect::<Vec<_>>();
......
......@@ -175,7 +175,7 @@ macro_rules! inst {
target: $dest.id(),
args: {
let mut i =0;
vec![$($arg.clone()),*].iter().map(|x| {let ret = DestArg::Normal(i); i+=1; ret}).collect()
vec![$($arg.clone()),*].iter().map(|_| {let ret = DestArg::Normal(i); i+=1; ret}).collect()
}
})
});
......@@ -189,7 +189,7 @@ macro_rules! inst {
ops: RwLock::new(vec![$($val.clone()), *]),
v: Instruction_::Return({
let mut i = 0;
vec![$($val.clone()), *].iter().map(|x| {let ret = i; i+= 1; ret}).collect()
vec![$($val.clone()), *].iter().map(|_| {let ret = i; i+= 1; ret}).collect()
})
});
};
......
......@@ -2,10 +2,8 @@ use mu::ast::types::*;
use mu::ast::ir::*;
use mu::ast::ptr::*;
use mu::ast::inst::*;
use mu::ast::op::*;
use mu::vm::*;
use mu::compiler::*;
use mu::testutil;
use std::sync::RwLock;
use std::sync::Arc;
......@@ -59,9 +57,7 @@ pub fn gen_ccall_exit(arg: P<TreeNode>, func_ver: &mut MuFunctionVersion, vm: &V
let const_exit = vm.declare_const(vm.next_id(), type_def_ufp_exit.clone(), Constant::ExternSym(C("exit")));
vm.set_name(const_exit.as_entity(), Mu("exit"));
// exprCCALL %const_exit (%const_int32_10) normal: %end(), exception: %end()
let blk_end_id = vm.next_id();
// exprCCALL %const_exit (%const_int32_10)
let const_exit_local = func_ver.new_constant(const_exit.clone());
func_ver.new_inst(Instruction{
......@@ -112,10 +108,8 @@ fn ccall_exit() -> VM {
let mut blk_entry = Block::new(vm.next_id());
vm.set_name(blk_entry.as_entity(), Mu("entry"));
// exprCCALL %const_exit (%const_int32_10) normal: %end(), exception: %end()
let blk_end_id = vm.next_id();
// exprCCALL %const_exit (%const_int32_10)
let const_int32_10_local = func_ver.new_constant(const_int32_10.clone());
let blk_entry_ccall = gen_ccall_exit(const_int32_10_local.clone(), &mut func_ver, &vm);
// RET %const_int32_0
......
......@@ -5,7 +5,6 @@ extern crate libloading;
use self::mu::ast::types::*;
use self::mu::ast::ir::*;
use self::mu::ast::inst::*;
use self::mu::ast::op::*;
use self::mu::vm::*;
use self::mu::testutil;
......
......@@ -6,12 +6,9 @@ use self::mu::ast::ir::*;
use self::mu::ast::inst::*;
use self::mu::ast::op::*;
use self::mu::vm::*;
use self::mu::compiler::*;
use std::sync::RwLock;
use std::sync::Arc;
use mu::testutil;
use mu::testutil::aot;
#[test]
fn test_add_u8() {
......
......@@ -4,12 +4,9 @@ use mu::ast::inst::*;
use mu::ast::op::*;
use mu::vm::*;
use mu::compiler::*;
use mu::runtime::thread::MuThread;
use mu::utils::Address;
use std::sync::Arc;
use std::sync::RwLock;
use std::collections::HashMap;
use mu::testutil::aot;
use test_compiler::test_call::gen_ccall_exit;
......@@ -757,8 +754,6 @@ pub fn hybrid_var_part_insts() -> VM {
let int64_0_local = func_ver.new_constant(int64_0.clone());
let int64_1_local = func_ver.new_constant(int64_1.clone());
let int64_2_local = func_ver.new_constant(int64_2.clone());
let int64_3_local = func_ver.new_constant(int64_3.clone());
let int64_4_local = func_ver.new_constant(int64_4.clone());
let int64_10_local = func_ver.new_constant(int64_10.clone());
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment