
Commit b0052927 authored by qinsoon

Merge branch 'develop' into swapstack

parents 1fd56fe4 7c03955f
image: "qinsoon/ubuntu-zebu-test:latest"
stages:
- build
- test
- rustfmt
before_script:
- export PATH=$PATH:/root/.cargo/bin
- export MU_ZEBU=$CI_PROJECT_DIR
- export ZEBU_BUILD=release
- export CARGO_HOME=.cargo
- export CC=clang
- source /home/gitlab-runner/ci/bin/activate
build:
stage: build
@@ -117,4 +115,4 @@ testjit:som:
rustfmt:
stage: rustfmt
script:
- rustup run nightly cargo fmt -- --write-mode=diff src/ast/src/lib.rs src/gc/src/lib.rs src/utils/src/lib.rs --verbose
- CARGO_HOME=/home/gitlab-runner/.cargo rustup run nightly-2017-07-19 cargo fmt -- --write-mode=diff src/ast/src/lib.rs src/gc/src/lib.rs src/utils/src/lib.rs --verbose
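The rustfmt job now pins an exact toolchain (nightly-2017-07-19) and points CARGO_HOME at the runner's shared cargo directory, rather than relying on whatever `nightly` resolves to on the runner. A minimal sketch of reproducing the same check locally, assuming rustup is installed and the pinned toolchain ships a compatible rustfmt (only the cargo fmt invocation is taken from the CI line above; the rest is illustrative):

    # Assumption: rustup is available locally; install the pinned toolchain once.
    rustup toolchain install nightly-2017-07-19
    # Run the same check as CI: --write-mode=diff prints formatting differences
    # for the listed crate roots instead of rewriting them in place.
    rustup run nightly-2017-07-19 cargo fmt -- --write-mode=diff \
        src/ast/src/lib.rs src/gc/src/lib.rs src/utils/src/lib.rs --verbose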
@@ -935,7 +935,6 @@ impl TreeNode {
TreeNode_::Value(ref pv) => pv.ty.clone()
}
}
}
impl fmt::Display for TreeNode {
@@ -1069,7 +1068,7 @@ impl Value {
}
const DISPLAY_ID: bool = true;
const DISPLAY_TYPE: bool = false;
const DISPLAY_TYPE: bool = true;
const PRINT_ABBREVIATE_NAME: bool = true;
impl fmt::Debug for Value {
......
@@ -13,8 +13,7 @@
// limitations under the License.
#[derive(Copy, Clone, Debug, PartialEq)]
pub
enum BinOp {
pub enum BinOp {
// BinOp Int(n) Int(n) -> Int(n)
Add,
Sub,
@@ -44,11 +43,7 @@ impl BinOp {
pub fn is_fp(self) -> bool {
use op::BinOp::*;
match self {
FAdd |
FSub |
FMul |
FDiv |
FRem => true,
FAdd | FSub | FMul | FDiv | FRem => true,
_ => false
}
}
@@ -188,19 +183,21 @@ impl CmpOp {
}
}
pub fn is_fp_cmp(self) -> bool { !self.is_int_cmp() }
pub fn is_fp_cmp(self) -> bool {
!self.is_int_cmp()
}
pub fn is_eq_cmp(self) -> bool {
use op::CmpOp::*;
match self {
EQ | NE => true,
EQ | NE => true,
_ => false
}
}
pub fn is_ult_cmp(self) -> bool {
use op::CmpOp::*;
match self {
UGE | UGT | ULE | ULT => true,
_ => false
UGE | UGT | ULE | ULT => true,
_ => false
}
}
......
@@ -1584,12 +1584,16 @@ impl<'a> InstructionSelection {
// get thread local and add offset to get sp_loc
let tl = self.emit_get_threadlocal(Some(node), f_content, f_context, vm);
self.backend
.emit_add_r_imm(&tl, *thread::NATIVE_SP_LOC_OFFSET as i32);
self.emit_load_base_offset(
&tl,
&tl,
*thread::NATIVE_SP_LOC_OFFSET as i32,
vm
);
// emit a call to swap_back_to_native_stack(sp_loc: Address)
self.emit_runtime_entry(
&entrypoints::SWAP_BACK_TO_NATIVE_STACK,
&entrypoints::MUENTRY_THREAD_EXIT,
vec![tl.clone()],
None,
Some(node),
......
@@ -42,6 +42,7 @@ use ast::ptr::P;
use ast::ir::*;
use ast::types::*;
use compiler::backend::RegGroup;
use vm::VM;
use utils::LinkedHashMap;
use std::collections::HashMap;
@@ -654,7 +655,9 @@ pub fn estimate_insts_for_ir(inst: &Instruction) -> usize {
// runtime call
New(_) | NewHybrid(_, _) => 10,
NewStack(_) | NewThread(_, _) | NewThreadExn(_, _) | NewFrameCursor(_) => 10,
ThreadExit => 10, CurrentStack => 10, KillStack(_) => 10,
ThreadExit => 10,
CurrentStack => 10,
KillStack(_) => 10,
Throw(_) => 10,
SwapStackExpr { .. } | SwapStackExc { .. } | SwapStackKill { .. } => 10,
CommonInst_GetThreadLocal | CommonInst_SetThreadLocal(_) => 10,
......
@@ -138,6 +138,13 @@ impl HeapDump {
let field_addr = base + *offset;
let edge = unsafe { field_addr.load::<Address>() };
trace!(
"object reference from {} -> {} at +[{}]",
base,
edge,
offset
);
if !edge.is_zero() && !self.objects.contains_key(&edge) {
work_queue.push(edge);
}
......
@@ -655,8 +655,8 @@ pub fn run_test_2f(vm: &VM, test_name: &str, dep_name: &str, tester_name: &str)
let output_name = test_name.to_string() + "_" + tester_name;
let executable = link_test_primordial(
vec![
test_name.to_string(),
dep_name.to_string(),
test_name.to_string(),
tester_name.to_string(),
],
output_name.as_str(),
......
@@ -16,7 +16,7 @@
# swap_stack_to(new_sp: Address, entry: Address, old_sp_loc: Address)
# %rdi %rsi %rdx
begin_func swap_to_mu_stack
begin_func muthread_start_pass
# -- on old stack --
# C calling convention
pushq %rbp
@@ -63,7 +63,7 @@ begin_func swap_to_mu_stack
# push entry function and start it
pushq %rax
ret
end_func swap_to_mu_stack
end_func muthread_start_pass
# _swap_back_to_native_stack(sp_loc: Address)
# %rdi
......
@@ -26,7 +26,6 @@ macro_rules! assert_ir {
($ cond : expr , $ ( $ arg : tt ) + ) => { debug_assert!($cond, $($arg)+)};
}
pub struct MuIRBuilder {
/// ref to MuVM
mvm: *const MuVM,
@@ -2429,7 +2428,8 @@ impl<'lb, 'lvm> BundleLoader<'lb, 'lvm> {
let impl_opnd2 = self.get_treenode(fcb, opnd2);
assert_ir!(
impl_opnd1.ty() == impl_opnd2.ty() && impl_opnd1.ty() == impl_ty,
"Invalid instruction {:?}: Operand types {} and {} are not what was expected {}",
"Invalid instruction {:?}: Operand types {} and {} \
are not what was expected {}",
inst,
impl_opnd1.ty(),
impl_opnd2.ty(),
@@ -3442,7 +3442,8 @@ impl<'lb, 'lvm> BundleLoader<'lb, 'lvm> {
self.build_destination(fcb, ecnode.exc, &mut ops, &[], blocks);
assert_ir!(match **cur_stack_clause {
NodeCurrentStackClause::KillOld { .. } => false, // Can't have an exception
// Can't have an exception
NodeCurrentStackClause::KillOld { .. } => false,
// clause
_ => true
});
@@ -3832,7 +3833,8 @@ impl<'lb, 'lvm> BundleLoader<'lb, 'lvm> {
}
CMU_CI_UVM_NEW_STACK => {
assert_ir!(
tys.is_empty() && flags.is_empty() && exc_clause.is_none() && keepalives.is_none()
tys.is_empty() && flags.is_empty() && exc_clause.is_none() &&
keepalives.is_none()
);
assert!(sigs.len() == 1);
@@ -3843,15 +3845,14 @@ impl<'lb, 'lvm> BundleLoader<'lb, 'lvm> {
let impl_sig = self.ensure_sig_rec(sigs[0]);
assert_ir!(impl_sig.ret_tys.is_empty()); // The function isn't supposed to return
assert_ir!(
match impl_opnd.ty().v {
MuType_::FuncRef(ref sig) => *sig == impl_sig,
_ => false
}
);
assert_ir!(match impl_opnd.ty().v {
MuType_::FuncRef(ref sig) => *sig == impl_sig,
_ => false
});
let impl_stackref = self.ensure_stackref();
let impl_rv = self.new_ssa(fcb, result_ids[0], impl_stackref).clone_value();
let impl_rv = self.new_ssa(fcb, result_ids[0], impl_stackref)
.clone_value();
Instruction {
hdr: hdr,
@@ -3863,13 +3864,15 @@ impl<'lb, 'lvm> BundleLoader<'lb, 'lvm> {
CMU_CI_UVM_CURRENT_STACK => {
assert_ir!(
tys.is_empty() && args.is_empty() && sigs.is_empty() && flags.is_empty() &&
exc_clause.is_none() && keepalives.is_none()
exc_clause.is_none() &&
keepalives.is_none()
);
assert!(result_ids.len() == 1);
let impl_stackref = self.ensure_stackref();
let impl_rv = self.new_ssa(fcb, result_ids[0], impl_stackref).clone_value();
let impl_rv = self.new_ssa(fcb, result_ids[0], impl_stackref)
.clone_value();
Instruction {
hdr: hdr,
@@ -3880,8 +3883,9 @@ impl<'lb, 'lvm> BundleLoader<'lb, 'lvm> {
}
CMU_CI_UVM_KILL_STACK => {
assert_ir!(
tys.is_empty() && sigs.is_empty() && flags.is_empty() && exc_clause.is_none() && keepalives.is_none()
&& result_ids.is_empty()
tys.is_empty() && sigs.is_empty() && flags.is_empty() &&
exc_clause.is_none() && keepalives.is_none() &&
result_ids.is_empty()
);
assert!(args.len() == 1);
......