WARNING! Access to this system is limited to authorised users only.
Unauthorised users may be subject to prosecution.
Unauthorised access to this system is a criminal offence under Australian law (Crimes Act 1914 (Cth), Part VIA)
It is a criminal offence to:
(1) Obtain access to data without authority. -Penalty 2 years imprisonment.
(2) Damage, delete, alter or insert data without authority. -Penalty 10 years imprisonment.
User activity is monitored and recorded. Anyone using this system expressly consents to such monitoring and recording.

To protect your data, the CISO has advised all users to enable 2FA as soon as possible.
Currently 2.7% of users enabled 2FA.

Commit e53f5d4f authored by qinsoon's avatar qinsoon
Browse files

[wip] also verifying spilling

parent 7c234f21
......@@ -11,7 +11,7 @@
</content>
<orderEntry type="inheritedJdk" />
<orderEntry type="sourceFolder" forTests="false" />
<orderEntry type="library" name="Cargo &lt;mu&gt;" level="project" />
<orderEntry type="library" name="Rust &lt;mu&gt;" level="project" />
<orderEntry type="library" name="Cargo &lt;mu&gt;" level="project" />
</component>
</module>
\ No newline at end of file
......@@ -501,6 +501,28 @@ impl MachineCode for ASMCode {
}
}
/// If the instruction at `index` is a load from a spill location,
/// return that spill location; otherwise `None` (also `None` when
/// `index` is out of bounds).
fn is_spill_load(&self, index: usize) -> Option<P<Value>> {
    let inst = self.code.get(index)?;
    match inst.spill_info {
        Some(SpillMemInfo::Load(ref loc)) => Some(loc.clone()),
        _ => None,
    }
}
/// If the instruction at `index` is a store to a spill location,
/// return that spill location; otherwise `None` (also `None` when
/// `index` is out of bounds).
fn is_spill_store(&self, index: usize) -> Option<P<Value>> {
    let inst = self.code.get(index)?;
    match inst.spill_info {
        Some(SpillMemInfo::Store(ref loc)) => Some(loc.clone()),
        _ => None,
    }
}
/// Successor instruction indices of the instruction at `index`.
/// Panics if `index` is out of bounds.
fn get_succs(&self, index: usize) -> &Vec<usize> {
    let inst = &self.code[index];
    &inst.succs
}
......@@ -3373,14 +3395,16 @@ pub fn spill_rewrite(
spills: &LinkedHashMap<MuID, P<Value>>,
func: &mut MuFunctionVersion,
cf: &mut CompiledFunction,
vm: &VM) -> Vec<P<Value>>
vm: &VM) -> LinkedHashMap<MuID, MuID>
{
trace!("spill rewrite for x86_64 asm backend");
trace!("code before spilling");
cf.mc().trace_mc();
let mut new_nodes = vec![];
// if temp a gets spilled, all its uses and defines will become a use/def of a scratch temp
// we maintain this mapping for later use
let mut spilled_scratch_temps = LinkedHashMap::new();
// record code and their insertion point, so we can do the copy/insertion all at once
let mut spill_code_before: LinkedHashMap<usize, Vec<Box<ASMCode>>> = LinkedHashMap::new();
......@@ -3407,8 +3431,10 @@ pub fn spill_rewrite(
// generate a random new temporary
let temp_ty = val_reg.ty.clone();
let temp = func.new_ssa(vm.next_id(), temp_ty.clone()).clone_value();
vec_utils::add_unique(&mut new_nodes, temp.clone());
// maintain mapping
trace!("reg {} used in Inst{} is replaced as {}", val_reg, i, temp);
spilled_scratch_temps.insert(temp.id(), reg);
// generate a load
let code = {
......@@ -3453,7 +3479,8 @@ pub fn spill_rewrite(
} else {
let temp_ty = val_reg.ty.clone();
let temp = func.new_ssa(vm.next_id(), temp_ty.clone()).clone_value();
vec_utils::add_unique(&mut new_nodes, temp.clone());
spilled_scratch_temps.insert(temp.id(), reg);
temp
};
......@@ -3499,5 +3526,5 @@ pub fn spill_rewrite(
trace!("code after spilling");
cf.mc().trace_mc();
new_nodes
spilled_scratch_temps
}
......@@ -44,7 +44,10 @@ pub struct GraphColoring<'a> {
worklist_spill: Vec<NodeIndex>,
spillable: LinkedHashMap<MuID, bool>,
spilled_nodes: Vec<NodeIndex>,
spill_history: LinkedHashMap<MuID, P<Value>>, // for validation, we need to log all registers get spilled
// for validation
spill_history: LinkedHashMap<MuID, P<Value>>, // we need to log all registers get spilled with their spill location
spill_scratch_temps: LinkedHashMap<MuID, MuID>, // we need to know the mapping between scratch temp -> original temp
worklist_freeze: LinkedHashSet<NodeIndex>,
frozen_moves: LinkedHashSet<Move>,
......@@ -55,10 +58,13 @@ pub struct GraphColoring<'a> {
impl <'a> GraphColoring<'a> {
pub fn start (func: &'a mut MuFunctionVersion, cf: &'a mut CompiledFunction, vm: &'a VM) -> GraphColoring<'a> {
GraphColoring::start_with_spill_history(LinkedHashMap::new(), func, cf, vm)
GraphColoring::start_with_spill_history(LinkedHashMap::new(), LinkedHashMap::new(), func, cf, vm)
}
fn start_with_spill_history(spill_history: LinkedHashMap<MuID, P<Value>>, func: &'a mut MuFunctionVersion, cf: &'a mut CompiledFunction, vm: &'a VM) -> GraphColoring<'a> {
fn start_with_spill_history(spill_history: LinkedHashMap<MuID, P<Value>>,
spill_scratch_temps: LinkedHashMap<MuID, MuID>,
func: &'a mut MuFunctionVersion, cf: &'a mut CompiledFunction, vm: &'a VM) -> GraphColoring<'a>
{
trace!("Initializing coloring allocator...");
cf.mc().trace_mc();
......@@ -94,7 +100,9 @@ impl <'a> GraphColoring<'a> {
worklist_spill: Vec::new(),
spillable: LinkedHashMap::new(),
spilled_nodes: Vec::new(),
spill_history: spill_history,
spill_scratch_temps: spill_scratch_temps,
worklist_freeze: LinkedHashSet::new(),
frozen_moves: LinkedHashSet::new(),
......@@ -184,7 +192,7 @@ impl <'a> GraphColoring<'a> {
self.rewrite_program();
return GraphColoring::start_with_spill_history(self.spill_history.clone(), self.func, self.cf, self.vm);
return GraphColoring::start_with_spill_history(self.spill_history.clone(), self.spill_scratch_temps.clone(), self.func, self.cf, self.vm);
}
self
......@@ -657,7 +665,6 @@ impl <'a> GraphColoring<'a> {
}
}
#[allow(unused_variables)]
fn rewrite_program(&mut self) {
let spills = self.spills();
......@@ -675,8 +682,10 @@ impl <'a> GraphColoring<'a> {
self.spill_history.insert(*reg_id, mem);
}
// though we are not using this right now
let new_temps = backend::spill_rewrite(&spilled_mem, self.func, self.cf, self.vm);
let scratch_temps = backend::spill_rewrite(&spilled_mem, self.func, self.cf, self.vm);
for (k, v) in scratch_temps {
self.spill_scratch_temps.insert(k, v);
}
}
pub fn spills(&self) -> Vec<MuID> {
......@@ -721,6 +730,10 @@ impl <'a> GraphColoring<'a> {
self.spill_history.clone()
}
/// Returns a copy of the scratch-temp -> original-temp mapping that was
/// accumulated during spill rewriting (consumed by regalloc validation).
pub fn get_spill_scratch_temps(&self) -> LinkedHashMap<MuID, MuID> {
    self.spill_scratch_temps.clone()
}
pub fn get_coalesced(&self) -> LinkedHashMap<MuID, MuID> {
let mut ret = LinkedHashMap::new();
......
......@@ -42,8 +42,9 @@ impl RegisterAllocation {
let reg_assignment = coloring.get_assignments();
let reg_spilled = coloring.get_spill_history();
let reg_coalesced = coloring.get_coalesced();
let spill_scratch_temps = coloring.get_spill_scratch_temps();
validate::validate_regalloc(&coloring.cf, &coloring.func, reg_assignment, reg_coalesced, reg_spilled)
validate::validate_regalloc(&coloring.cf, &coloring.func, reg_assignment, reg_coalesced, reg_spilled, spill_scratch_temps);
}
// replace regs
......
......@@ -41,45 +41,88 @@ impl AliveEntries {
ret
}
pub fn find_entry_for_temp(&self, temp: MuID) -> Option<&RegisterEntry> {
pub fn has_entries_for_temp(&self, temp: MuID) -> bool {
for entry in self.inner.values() {
if entry.match_temp(temp) {
return Some(entry);
return true;
}
}
None
false
}
pub fn find_entry_for_temp_mut(&mut self, temp: MuID) -> Option<&mut RegisterEntry> {
/// Collect references to every alive entry that tracks `temp`.
pub fn find_entries_for_temp(&self, temp: MuID) -> Vec<&RegisterEntry> {
    self.inner
        .values()
        .filter(|entry| entry.match_temp(temp))
        .collect()
}
pub fn find_entries_for_temp_mut(&mut self, temp: MuID) -> Vec<&mut RegisterEntry> {
let mut ret = vec![];
for entry in self.inner.values_mut() {
if entry.match_temp(temp) {
return Some(entry)
ret.push(entry);
}
}
None
ret
}
pub fn find_entry_for_reg(&self, reg: MuID) -> Option<&RegisterEntry> {
pub fn has_entries_for_reg(&self, reg: MuID) -> bool {
for entry in self.inner.values() {
if entry.match_reg(reg) {
return Some(entry);
return true;
}
}
None
false
}
pub fn find_entry_for_reg_mut(&mut self, reg: MuID) -> Option<&mut RegisterEntry> {
/// Collect references to every alive entry that records the real
/// register `reg`.
pub fn find_entries_for_reg(&self, reg: MuID) -> Vec<&RegisterEntry> {
    self.inner
        .values()
        .filter(|entry| entry.match_reg(reg))
        .collect()
}
pub fn find_entries_for_reg_mut(&mut self, reg: MuID) -> Vec<&mut RegisterEntry> {
let mut ret = vec![];
for entry in self.inner.values_mut() {
if entry.match_reg(reg) {
return Some(entry);
ret.push(entry)
}
}
ret
}
None
/// True if any alive entry is associated with the stack location `mem`.
pub fn has_entries_for_mem(&self, mem: P<Value>) -> bool {
    self.inner
        .values()
        .any(|entry| entry.match_stack_loc(mem.clone()))
}
/// Collect references to every alive entry associated with the stack
/// location `mem`.
pub fn find_entries_for_mem(&self, mem: P<Value>) -> Vec<&RegisterEntry> {
    self.inner
        .values()
        .filter(|entry| entry.match_stack_loc(mem.clone()))
        .collect()
}
/// Collect mutable references to every alive entry associated with the
/// stack location `mem`.
pub fn find_entries_for_mem_mut(&mut self, mem: P<Value>) -> Vec<&mut RegisterEntry> {
    self.inner
        .values_mut()
        .filter(|entry| entry.match_stack_loc(mem.clone()))
        .collect()
}
pub fn new_alive_reg(&mut self, reg: MuID) {
debug!("adding alive reg: {}", reg);
......@@ -110,11 +153,13 @@ impl AliveEntries {
pub fn add_temp_in_reg(&mut self, temp: MuID, reg: MuID) {
debug!("adding alive temp in reg: {} in {}", temp, reg);
let entry_exists = self.find_entry_for_temp(temp).is_some();
let entry_exists = self.has_entries_for_temp(temp);
if entry_exists {
let mut entry = self.find_entry_for_temp_mut(temp).unwrap();
let mut entries = self.find_entries_for_temp_mut(temp);
for entry in entries {
entry.add_real_reg(reg);
}
} else {
let id = self.new_index();
let entry = RegisterEntry {
......@@ -127,6 +172,28 @@ impl AliveEntries {
}
}
/// Record that `temp` is alive in stack location `mem`.
///
/// If entries for `temp` already exist, the stack location is attached
/// to each of them; otherwise a fresh entry is created that holds no
/// real register yet.
pub fn add_temp_in_mem(&mut self, temp: MuID, mem: P<Value>) {
    // fixed log message: was the garbled "alive alive temp in mem"
    debug!("adding alive temp in mem: {} in {}", temp, mem);

    let entry_exists = self.has_entries_for_temp(temp);
    if entry_exists {
        let entries = self.find_entries_for_temp_mut(temp);
        for entry in entries {
            entry.add_stack_loc(mem.clone());
        }
    } else {
        let id = self.new_index();
        let entry = RegisterEntry {
            temp: Some(temp),
            real: vec![],
            stack: vec![mem]
        };
        self.inner.insert(id, entry);
    }
}
pub fn remove_reg(&mut self, reg: MuID) {
debug!("removing alive reg: {}", reg);
let mut indices = vec![];
......@@ -185,6 +252,18 @@ impl RegisterEntry {
self.temp.clone()
}
/// Remove the first recorded occurrence of real register `reg` from
/// this entry; no-op when `reg` is not recorded.
pub fn remove_real(&mut self, reg: MuID) {
    let pos = vec_utils::find_value(&self.real, reg);
    if let Some(index) = pos {
        self.real.remove(index);
    }
}
/// Remove the first recorded occurrence of stack location `mem` from
/// this entry; no-op when `mem` is not recorded.
pub fn remove_stack_loc(&mut self, mem: P<Value>) {
    let pos = vec_utils::find_value(&self.stack, mem);
    if let Some(index) = pos {
        self.stack.remove(index);
    }
}
pub fn match_temp(&self, temp: MuID) -> bool {
if self.temp.is_some() && self.temp.unwrap() == temp {
true
......@@ -197,11 +276,21 @@ impl RegisterEntry {
vec_utils::find_value(&self.real, reg).is_some()
}
/// Whether this entry records `mem` as one of its stack locations.
pub fn match_stack_loc(&self, mem: P<Value>) -> bool {
    let found = vec_utils::find_value(&self.stack, mem);
    found.is_some()
}
/// Record real register `reg` for this entry, keeping the list
/// duplicate-free.
pub fn add_real_reg(&mut self, reg: MuID) {
    // a shared borrow suffices for the lookup — was a needless
    // `&mut self.real`, inconsistent with `remove_real` above
    if vec_utils::find_value(&self.real, reg).is_none() {
        self.real.push(reg);
    }
}
/// Record stack location `mem` for this entry, keeping the list
/// duplicate-free.
pub fn add_stack_loc(&mut self, mem: P<Value>) {
    // a shared borrow suffices for the lookup — was a needless
    // `&mut self.stack`, inconsistent with `remove_stack_loc`
    if vec_utils::find_value(&self.stack, mem.clone()).is_none() {
        self.stack.push(mem);
    }
}
}
impl fmt::Display for RegisterEntry {
......
......@@ -14,7 +14,8 @@ pub fn validate_regalloc(cf: &CompiledFunction,
func: &MuFunctionVersion,
reg_assigned: LinkedHashMap<MuID, MuID>,
reg_coalesced:LinkedHashMap<MuID, MuID>,
reg_spilled: LinkedHashMap<MuID, P<Value>>)
reg_spilled: LinkedHashMap<MuID, P<Value>>,
spill_scratch_regs: LinkedHashMap<MuID, MuID>)
{
debug!("---Validating register allocation results---");
......@@ -58,15 +59,46 @@ pub fn validate_regalloc(cf: &CompiledFunction,
mc.trace_inst(i);
if mc.is_jmp(i).is_some() {
// we need to flow-sensitive analysis
// we need to do flow-sensitive analysis
unimplemented!();
}
// validate spill
if let Some(spill_loc) = mc.is_spill_load(i) {
// spill load is a move from spill location (mem) to temp
// its define is the scratch temp
let scratch_temp = mc.get_inst_reg_defines(i)[0];
let source_temp = get_source_temp_for_scratch(scratch_temp, &spill_scratch_regs);
add_spill_load(scratch_temp, source_temp, spill_loc, &reg_spilled, &mut alive);
} else if let Some(spill_loc) = mc.is_spill_store(i) {
// spill store is a move from scratch temp to mem
// it uses scratch temp as well as stack pointer (to refer to mem)
// we try to find the scratch temp
let scratch_temp = {
let uses = mc.get_inst_reg_uses(i);
let mut use_temps = vec![];
for reg in uses {
if reg >= MACHINE_ID_END {
use_temps.push(reg)
}
};
assert!(use_temps.len() == 1);
use_temps[0]
};
let source_temp = get_source_temp_for_scratch(scratch_temp, &spill_scratch_regs);
add_spill_store(scratch_temp, source_temp, spill_loc, &reg_spilled, &reg_coalesced, &mut alive);
}
// validate uses of registers
for reg_use in mc.get_inst_reg_uses(i) {
validate_use(reg_use, &reg_assigned, &alive);
}
// remove kills in the inst from alive entries
// remove registers that die at this instruction from alive entries
if let Some(kills) = liveness.get_kills(i) {
for reg in kills.iter() {
kill_reg(*reg, &reg_assigned, &mut alive);
......@@ -90,6 +122,13 @@ pub fn validate_regalloc(cf: &CompiledFunction,
}
}
/// Follow the scratch->source chain until a temp that is not itself a
/// scratch temp is reached (a scratch temp introduced in one spill
/// iteration may itself have been spilled and replaced in a later one).
fn get_source_temp_for_scratch(scratch: MuID, spill_scratch_temps: &LinkedHashMap<MuID, MuID>) -> MuID {
    let mut current = scratch;
    while let Some(source) = spill_scratch_temps.get(&current) {
        current = *source;
    }
    current
}
fn get_machine_reg(reg: MuID, reg_assigned: &LinkedHashMap<MuID, MuID>) -> MuID {
// find machine regs
if reg < MACHINE_ID_END {
......@@ -113,13 +152,15 @@ fn validate_use(reg: MuID, reg_assigned: &LinkedHashMap<MuID, MuID>, alive: &Ali
let temp = reg;
// ensure temp is assigned to the same machine reg in alive entries
if let Some(entry) = alive.find_entry_for_temp(temp) {
if alive.has_entries_for_temp(temp) {
alive.find_entries_for_temp(temp).iter().inspect(|entry| {
if !entry.match_reg(machine_reg) {
error!("Temp{}/MachineReg{} does not match at this point. ", temp, machine_reg);
error!("Temp{} is assigned as {}", temp, entry);
panic!("validation failed: temp-reg pair doesnt match")
}
});
} else {
error!("Temp{} is not alive at this point. ", temp);
......@@ -130,7 +171,7 @@ fn validate_use(reg: MuID, reg_assigned: &LinkedHashMap<MuID, MuID>, alive: &Ali
fn kill_reg(reg: MuID, reg_assigned: &LinkedHashMap<MuID, MuID>, alive: &mut AliveEntries) {
if reg < MACHINE_ID_END {
if alive.find_entry_for_reg(reg).is_some() {
if alive.has_entries_for_reg(reg) {
alive.remove_reg(reg);
}
} else {
......@@ -140,36 +181,47 @@ fn kill_reg(reg: MuID, reg_assigned: &LinkedHashMap<MuID, MuID>, alive: &mut Ali
}
}
/// Whether `reg1` and `reg2` are the same register, or one has been
/// coalesced into the other (`reg_coalesced` maps a register to the
/// register it was coalesced with).
fn is_coalesced(reg1: MuID, reg2: MuID, reg_coalesced: &LinkedHashMap<MuID, MuID>) -> bool {
    if reg1 == reg2 {
        return true;
    }
    // BUG FIX: the original guarded `get(&reg2).unwrap()` with
    // `contains_key(&reg1)` (and vice versa), which panics when only one
    // of the two keys is present. The pre-refactor inline check used the
    // same key for the guard and the lookup; restore that semantics
    // without any unwrap.
    reg_coalesced.get(&reg1).map_or(false, |t| *t == reg2)
        || reg_coalesced.get(&reg2).map_or(false, |t| *t == reg1)
}
fn add_def(reg: MuID, reg_assigned: &LinkedHashMap<MuID, MuID>, reg_coalesced: &LinkedHashMap<MuID, MuID>, alive: &mut AliveEntries) {
let machine_reg = get_machine_reg(reg, reg_assigned);
let temp = reg;
if reg < MACHINE_ID_END {
// if it is a machine register
// we require either it doesn't have an entry,
// or its entry doesnt have a temp, so that we can safely overwrite it
if alive.find_entry_for_reg(reg).is_none() {
if !alive.has_entries_for_reg(reg) {
// add new machine register
alive.new_alive_reg(reg);
} else if !alive.find_entry_for_reg(reg).unwrap().has_temp() {
// overwrite it
} else if !alive.find_entries_for_reg(reg).iter().any(|entry| entry.has_temp()) {
// overwrite the value that is not used
} else {
let old_temp = alive.find_entry_for_reg(reg).unwrap().get_temp().unwrap();
alive.find_entries_for_reg(reg).iter().inspect(|entry| {
let old_temp = entry.get_temp().unwrap();
error!("Register{}/Temp{} is alive at this point, defining a new value to Register{} is incorrect", reg, old_temp, reg);
});
panic!("validation failed: define a register that is already alive (value overwritten)");
}
} else {
let machine_reg = get_machine_reg(reg, reg_assigned);
let temp = reg;
if alive.find_entry_for_reg(machine_reg).is_none() {
if !alive.has_entries_for_reg(machine_reg) {
// if this register is not alive, we add an entry for it
alive.add_temp_in_reg(temp, machine_reg);
} else {
// otherwise, this register contains some value
{
let entry = alive.find_entry_for_reg_mut(machine_reg).unwrap();
for entry in alive.find_entries_for_reg_mut(machine_reg) {
if !entry.has_temp() {
debug!("adding temp {} to reg {}", temp, machine_reg);
entry.set_temp(temp);
......@@ -177,13 +229,8 @@ fn add_def(reg: MuID, reg_assigned: &LinkedHashMap<MuID, MuID>, reg_coalesced: &
// if the register is holding a temporary, it needs to be coalesced with new temp
let old_temp: MuID = entry.get_temp().unwrap();
if old_temp == temp {
// the register that is used at this instruction, and also defined here
if is_coalesced(old_temp, temp, reg_coalesced) {
// safe
} else if (reg_coalesced.contains_key(&old_temp) && *reg_coalesced.get(&old_temp).unwrap() == temp)
|| (reg_coalesced.contains_key(&temp) && *reg_coalesced.get(&temp).unwrap() == old_temp)
{
// coalesced, safe
} else {
// not coalesced, error
error!("Temp{} and Temp{} are not coalesced, but they use the same Register{}", temp, old_temp, machine_reg);
......@@ -192,11 +239,69 @@ fn add_def(reg: MuID, reg_assigned: &LinkedHashMap<MuID, MuID>, reg_coalesced: &
}
}
}
}
// they are coalesced, it is valid
alive.add_temp_in_reg(temp, machine_reg);
}
}
// if other temp use the same register, remove the register from their entry
for entry in alive.find_entries_for_reg_mut(machine_reg) {
if let Some(other_temp) = entry.get_temp() {
if is_coalesced(other_temp, temp, reg_coalesced) {
// do nothing
} else {
entry.remove_real(reg);
}
}
}
}
// Validation bookkeeping for a spill-store instruction (a move from a
// scratch temp into its spill slot on the stack).
//
// `scratch_temp` is the temp named in the instruction; `source_temp` is
// the original temp it stands in for (resolved via the scratch->source
// mapping). After the store, both are treated as alive in `spill_loc`.
//
// NOTE(review): `reg_spilled` is not used in this function — confirm
// whether the spill-location table was meant to be cross-checked here.
fn add_spill_store(scratch_temp: MuID, source_temp: MuID, spill_loc: P<Value>,
reg_spilled: &LinkedHashMap<MuID, P<Value>>,
reg_coalesced: &LinkedHashMap<MuID, MuID>,
alive: &mut AliveEntries) {
// record source_temp as alive in the spill slot
alive.add_temp_in_mem(source_temp, spill_loc.clone());
// record scratch_temp as alive in the spill slot as well
alive.add_temp_in_mem(scratch_temp, spill_loc.clone());
// storing into a spill location is always valid in itself,
// but if another temp uses the same mem location and is not a
// coalesced temp, the location must be removed from that temp's entry
for entry in alive.find_entries_for_mem_mut(spill_loc.clone()) {
if let Some(temp) = entry.get_temp() {
if is_coalesced(temp, source_temp, reg_coalesced) || is_coalesced(temp, scratch_temp, reg_coalesced) {
// coalesced temps may legitimately share one spill location
} else {
entry.remove_stack_loc(spill_loc.clone())
}
}
}
}
fn add_spill_load(scratch_temp: MuID, source_temp: MuID, spill_loc: P<Value>,
reg_spilled: &LinkedHashMap<MuID, P<Value>>,
alive: &mut AliveEntries) {
// verify its correct: the source temp should be alive with the mem location
if alive.has_entries_for_temp(source_temp) {
alive.find_entries_for_temp(source_temp).iter().inspect(|entry| {
if entry.match_stack_loc(spill_loc.clone()) {
// valid
} else {
error!("SourceTemp{} is alive with the following entry, loading it from {} as ScratchTemp{} is not valid",