Unverified commit 85cdf341 by Enkelmann, committed by GitHub

Assume standard calling convention for internal function calls (#112)

parent 3a76dfaf
@@ -37,7 +37,7 @@
 use fnv::FnvHashMap;
 use petgraph::graph::{DiGraph, EdgeIndex, NodeIndex};
 use petgraph::visit::EdgeRef;
-use std::collections::{BTreeMap, BinaryHeap};
+use std::collections::{BTreeMap, BTreeSet};
 /// The context of a fixpoint computation.
 ///
@@ -83,13 +83,17 @@ pub trait Context {
 /// };
 /// ```
 pub struct Computation<T: Context> {
+    /// The context object needed for the fixpoint computation
     fp_context: T,
     /// maps a node index to its priority (higher priority nodes get stabilized first)
     node_priority_list: Vec<usize>,
     /// maps a priority to the corresponding node index
    priority_to_node_list: Vec<NodeIndex>,
-    worklist: BinaryHeap<usize>,
+    /// The worklist contains the priority numbers (not the node indices!) of nodes marked as not yet stabilized.
+    worklist: BTreeSet<usize>,
+    /// The (optional) default value assigned to nodes without an explicit value.
     default_value: Option<T::NodeValue>,
+    /// The internal map containing all known node values.
     node_values: FnvHashMap<NodeIndex, T::NodeValue>,
 }
@@ -109,11 +113,11 @@ impl<T: Context> Computation<T> {
             node_to_index.insert(node_index, i);
         }
         let node_priority_list: Vec<usize> = node_to_index.values().copied().collect();
-        let mut worklist = BinaryHeap::new();
+        let mut worklist = BTreeSet::new();
         // If a default value exists, all nodes are added to the worklist. If not, the worklist is empty
         if default_value.is_some() {
             for i in 0..sorted_nodes.len() {
-                worklist.push(i);
+                worklist.insert(i);
             }
         }
         Computation {
@@ -138,7 +142,7 @@ impl<T: Context> Computation<T> {
     /// Set the value of a node and mark the node as not yet stabilized.
     pub fn set_node_value(&mut self, node: NodeIndex, value: T::NodeValue) {
         self.node_values.insert(node, value);
-        self.worklist.push(self.node_priority_list[node.index()]);
+        self.worklist.insert(self.node_priority_list[node.index()]);
     }
     /// Merge the value at a node with some new value.
@@ -185,19 +189,26 @@ impl<T: Context> Computation<T> {
     /// If a node does not stabilize after max_steps visits, the end result will not be a fixpoint but only an intermediate result of a fixpoint computation.
     pub fn compute_with_max_steps(&mut self, max_steps: u64) {
         let mut steps = vec![0; self.fp_context.get_graph().node_count()];
-        while let Some(priority) = self.worklist.pop() {
+        let mut non_stabilized_nodes = BTreeSet::new();
+        while let Some(priority) = self.worklist.iter().next_back().cloned() {
+            let priority = self.worklist.take(&priority).unwrap();
             let node = self.priority_to_node_list[priority];
             if steps[node.index()] < max_steps {
                 steps[node.index()] += 1;
                 self.update_node(node);
+            } else {
+                non_stabilized_nodes.insert(priority);
             }
         }
+        // After the algorithm finished, the new worklist is the list of non-stabilized nodes
+        self.worklist = non_stabilized_nodes;
     }
     /// Compute the fixpoint of the fixpoint problem.
     /// If the fixpoint algorithm does not converge to a fixpoint, this function will not terminate.
     pub fn compute(&mut self) {
-        while let Some(priority) = self.worklist.pop() {
+        while let Some(priority) = self.worklist.iter().next_back().cloned() {
+            let priority = self.worklist.take(&priority).unwrap();
             let node = self.priority_to_node_list[priority];
             self.update_node(node);
         }
@@ -222,6 +233,14 @@ impl<T: Context> Computation<T> {
     pub fn has_stabilized(&self) -> bool {
         self.worklist.is_empty()
     }
+    /// Return a list of all nodes which are marked as not-stabilized
+    pub fn get_worklist(&self) -> Vec<NodeIndex> {
+        self.worklist
+            .iter()
+            .map(|priority| self.priority_to_node_list[*priority])
+            .collect()
+    }
 }
 #[cfg(test)]
...
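The switch from `BinaryHeap` to `BTreeSet` above deduplicates the worklist: re-marking an already pending node is now a no-op, while `iter().next_back()` still yields the highest priority first. Below is a minimal standalone sketch of the idiom (not code from this commit; `drain_worklist` is an illustrative name):

```rust
use std::collections::BTreeSet;

/// Drain a BTreeSet used as a max-priority worklist, returning the visit order.
/// Unlike a BinaryHeap, inserting an already pending priority does not queue it twice.
fn drain_worklist(worklist: &mut BTreeSet<usize>) -> Vec<usize> {
    let mut visit_order = Vec::new();
    // `next_back()` peeks at the largest element; `remove` takes it out.
    while let Some(priority) = worklist.iter().next_back().cloned() {
        worklist.remove(&priority);
        visit_order.push(priority);
        // A real fixpoint computation would call `update_node` here and
        // re-insert the priorities of successor nodes whose value changed.
    }
    visit_order
}

fn main() {
    // Duplicate insertions collapse: priority 1 is queued only once.
    let mut worklist: BTreeSet<usize> = [3, 1, 4, 1, 5].into_iter().collect();
    assert_eq!(drain_worklist(&mut worklist), vec![5, 4, 3, 1]);
}
```

The dedup matters because `set_node_value` may mark the same node several times before it is next visited; with the previous `BinaryHeap`, every mark pushed a fresh copy.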
@@ -283,6 +283,11 @@ impl<'a, T: Context<'a>> Computation<'a, T> {
     pub fn has_stabilized(&self) -> bool {
         self.generalized_computation.has_stabilized()
     }
+    /// Return a list of all nodes which are marked as not-stabilized
+    pub fn get_worklist(&self) -> Vec<NodeIndex> {
+        self.generalized_computation.get_worklist()
+    }
 }
 /// Helper function to merge two values wrapped in `Option<..>`.
...
@@ -90,7 +90,15 @@ impl<'a> crate::analysis::interprocedural_fixpoint::Context<'a> for Context<'a>
         let address_bytesize = self.project.stack_pointer_register.size;
         let mut callee_state = state.clone();
+        // Remove virtual registers, since they no longer exist in the callee.
         callee_state.remove_virtual_register();
+        // Remove callee-saved registers, since the callee should not use their values anyway.
+        // This should prevent recursive references to all stack frames in the call tree,
+        // since their source, the stack frame base pointer, is callee-saved.
+        if let Some(cconv) = self.project.get_standard_calling_convention() {
+            // Note that this may lead to analysis errors if the function uses another calling convention.
+            callee_state.remove_callee_saved_register(cconv);
+        }
         // Replace the caller stack ID with one determined by the call instruction.
         // This has to be done *before* adding the new callee stack id to avoid confusing caller and callee stack ids in case of recursive calls.
         callee_state.replace_abstract_id(
@@ -208,6 +216,15 @@ impl<'a> crate::analysis::interprocedural_fixpoint::Context<'a> for Context<'a>
         state_after_return.readd_caller_objects(state_before_call);
+        if let Some(cconv) = self.project.get_standard_calling_convention() {
+            // Restore information about callee-saved registers from the caller state.
+            // TODO: Implement some kind of check to ensure that the callee adheres to the given calling convention!
+            // The current workaround should be reasonably exact for programs written in C,
+            // but may introduce a lot of errors
+            // if the compiler often uses other calling conventions for internal function calls.
+            state_after_return.restore_callee_saved_register(state_before_call, cconv);
+        }
         // remove non-referenced objects from the state
         state_after_return.remove_unreferenced_objects();
...
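These two hunks are the two halves of one scheme: callee-saved registers are forgotten when entering a callee and restored from the pre-call state on return. A standalone toy model of that scheme (a sketch with illustrative names, not this crate's types):

```rust
use std::collections::HashMap;

/// Toy register state: register name -> known value.
type RegState = HashMap<String, u64>;

/// On a call, drop callee-saved registers: their values are of no use inside
/// the callee, and forgetting them severs references to the caller's stack
/// frame (e.g. through the saved frame base pointer).
fn state_for_callee(caller: &RegState, callee_saved: &[&str]) -> RegState {
    let mut callee = caller.clone();
    callee.retain(|name, _| !callee_saved.contains(&name.as_str()));
    callee
}

/// On a return, restore callee-saved registers from the pre-call state,
/// trusting (without checking) that the callee adhered to the convention.
fn state_after_return(
    mut callee: RegState,
    caller: &RegState,
    callee_saved: &[&str],
) -> RegState {
    for name in callee_saved {
        match caller.get(*name) {
            Some(value) => callee.insert((*name).to_string(), *value),
            None => callee.remove(*name),
        };
    }
    callee
}

fn main() {
    let callee_saved = ["RBP"];
    let caller = RegState::from([("RBP".to_string(), 0x7fff_0000), ("RAX".to_string(), 42)]);

    let callee = state_for_callee(&caller, &callee_saved);
    assert!(!callee.contains_key("RBP")); // forgotten inside the callee

    let returned = state_after_return(callee, &caller, &callee_saved);
    assert_eq!(returned.get("RBP"), Some(&0x7fff_0000)); // restored from the caller
}
```

The restore step deliberately trusts the convention; as the TODO in the diff notes, a callee that clobbers a callee-saved register would go unnoticed.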
@@ -282,6 +282,98 @@ impl<'a> PointerInference<'a> {
         self.add_speculative_entry_points(project, false);
         self.compute();
         self.count_blocks_with_state();
+        if !self.computation.has_stabilized() {
+            let worklist_size = self.computation.get_worklist().len();
+            let _ = self.log_debug(format!(
+                "Pointer Inference: Fixpoint did not stabilize. Remaining worklist size: {}",
+                worklist_size
+            ));
+        }
+    }
+    /// Print information on dead ends in the control flow graph for debugging purposes.
+    /// Ignore returns where there is no known caller stack id.
+    #[allow(dead_code)]
+    fn print_cfg_dead_ends(&self) {
+        let graph = self.computation.get_graph();
+        for (node_id, node) in graph.node_references() {
+            if let Some(node_value) = self.computation.get_node_value(node_id) {
+                if !graph
+                    .neighbors(node_id)
+                    .any(|neighbor| self.computation.get_node_value(neighbor).is_some())
+                {
+                    match node {
+                        Node::BlkEnd(block, _sub) => {
+                            let state = node_value.unwrap_value();
+                            if block.term.jmps.is_empty() {
+                                println!("Dead end without jumps after block {}", block.tid);
+                            }
+                            for jmp in block.term.jmps.iter() {
+                                match &jmp.term {
+                                    Jmp::BranchInd(target_expr) => {
+                                        if let Ok(address) = state.eval(&target_expr) {
+                                            println!(
+                                                "{}: Indirect jump to {}",
+                                                jmp.tid,
+                                                address.to_json_compact()
+                                            );
+                                        } else {
+                                            println!(
+                                                "{}: Indirect jump. Could not compute address",
+                                                jmp.tid
+                                            );
+                                        }
+                                    }
+                                    Jmp::CallInd { target, return_ } => {
+                                        if let Ok(address) = state.eval(&target) {
+                                            println!(
+                                                "{}: Indirect call to {}. HasReturn: {}",
+                                                jmp.tid,
+                                                address.to_json_compact(),
+                                                return_.is_some()
+                                            );
+                                        } else {
+                                            println!(
+                                                "{}: Indirect call. Could not compute address",
+                                                jmp.tid
+                                            );
+                                        }
+                                    }
+                                    Jmp::Return(_) => {
+                                        if !state.caller_stack_ids.is_empty() {
+                                            println!(
+                                                "{}: Return dead end despite known caller ids",
+                                                jmp.tid
+                                            )
+                                        }
+                                    }
+                                    _ => println!(
+                                        "{}: Unexpected Jmp dead end: {:?}",
+                                        jmp.tid, jmp.term
+                                    ),
+                                }
+                            }
+                        }
+                        Node::BlkStart(block, _sub) => {
+                            println!("{}: ERROR: Block start without successor state!", block.tid)
+                        }
+                        Node::CallReturn { call, return_ } => {
+                            let (call_state, return_state) = match node_value {
+                                NodeValue::CallReturnCombinator { call, return_ } => {
+                                    (call.is_some(), return_.is_some())
+                                }
+                                _ => panic!(),
+                            };
+                            println!(
+                                "CallReturn. Caller: ({}, {}), Return: ({}, {})",
+                                call.0.tid, call_state, return_.0.tid, return_state
+                            );
+                        }
+                    }
+                }
+            }
+        }
     }
 }
...
@@ -109,10 +109,22 @@ impl AbstractObjectInfo {
     }
     /// Get all abstract IDs that the object may contain pointers to.
-    pub fn get_referenced_ids(&self) -> &BTreeSet<AbstractIdentifier> {
+    /// This yields an overapproximation of possible pointer targets.
+    pub fn get_referenced_ids_overapproximation(&self) -> &BTreeSet<AbstractIdentifier> {
         &self.pointer_targets
     }
+    /// Get all abstract IDs that the object contains pointers to.
+    /// This yields an underapproximation of pointer targets,
+    /// since the object may contain pointers that could not be tracked by the analysis.
+    pub fn get_referenced_ids_underapproximation(&self) -> BTreeSet<AbstractIdentifier> {
+        let mut referenced_ids = BTreeSet::new();
+        for data in self.memory.values() {
+            referenced_ids.append(&mut data.referenced_ids())
+        }
+        referenced_ids
+    }
     /// For pointer values replace an abstract identifier with another one and add the offset_adjustment to the pointer offsets.
     /// This is needed to adjust stack pointers on call and return instructions.
     pub fn replace_abstract_id(
@@ -362,7 +374,7 @@ mod tests {
         target_map.insert(new_id("time_1", "RBX"), bv(40));
         let pointer = PointerDomain::with_targets(target_map.clone());
         object.set_value(pointer.into(), &bv(-15)).unwrap();
-        assert_eq!(object.get_referenced_ids().len(), 3);
+        assert_eq!(object.get_referenced_ids_overapproximation().len(), 3);
         object.replace_abstract_id(
             &new_id("time_1", "RAX"),
@@ -401,14 +413,14 @@ mod tests {
         target_map.insert(new_id("time_1", "RBX"), bv(40));
         let pointer = PointerDomain::with_targets(target_map.clone());
         object.set_value(pointer.into(), &bv(-15)).unwrap();
-        assert_eq!(object.get_referenced_ids().len(), 3);
+        assert_eq!(object.get_referenced_ids_overapproximation().len(), 3);
         let ids_to_remove = vec![new_id("time_1", "RAX"), new_id("time_23", "RBX")]
             .into_iter()
             .collect();
         object.remove_ids(&ids_to_remove);
         assert_eq!(
-            object.get_referenced_ids(),
+            object.get_referenced_ids_overapproximation(),
             &vec![new_id("time_234", "RAX"), new_id("time_1", "RBX")]
                 .into_iter()
                 .collect()
...
@@ -171,10 +171,28 @@ impl AbstractObjectList {
         }
     }
+    /// Return all IDs that may be referenced by the memory object pointed to by the given ID.
+    /// The returned set is an overapproximation of the actual referenced IDs.
+    pub fn get_referenced_ids_overapproximation(
+        &self,
+        id: &AbstractIdentifier,
+    ) -> BTreeSet<AbstractIdentifier> {
+        if let Some((object, _offset)) = self.objects.get(id) {
+            object.get_referenced_ids_overapproximation().clone()
+        } else {
+            BTreeSet::new()
+        }
+    }
     /// Return all IDs that get referenced by the memory object pointed to by the given ID.
-    pub fn get_referenced_ids(&self, id: &AbstractIdentifier) -> &BTreeSet<AbstractIdentifier> {
+    /// The returned set is an underapproximation of the actual referenced IDs,
+    /// since only pointers still tracked inside the memory object are used to compute it.
+    pub fn get_referenced_ids_underapproximation(
+        &self,
+        id: &AbstractIdentifier,
+    ) -> BTreeSet<AbstractIdentifier> {
         if let Some((object, _offset)) = self.objects.get(id) {
-            object.get_referenced_ids()
+            object.get_referenced_ids_underapproximation()
         } else {
             panic!("Abstract ID not associated to an object")
         }
@@ -240,11 +258,9 @@ impl AbstractObjectList {
         object_id: &AbstractIdentifier,
         new_possible_reference_targets: &BTreeSet<AbstractIdentifier>,
     ) {
-        self.objects
-            .get_mut(object_id)
-            .unwrap()
-            .0
-            .assume_arbitrary_writes(new_possible_reference_targets);
+        if let Some((object, _)) = self.objects.get_mut(object_id) {
+            object.assume_arbitrary_writes(new_possible_reference_targets);
+        }
     }
     /// Get the number of objects that are currently tracked.
@@ -425,13 +441,13 @@ mod tests {
             .unwrap();
         assert_eq!(
             other_obj_list
-                .get_referenced_ids(&new_id("RSP".into()))
+                .get_referenced_ids_overapproximation(&new_id("RSP".into()))
                 .len(),
             1
         );
         assert_eq!(
             *other_obj_list
-                .get_referenced_ids(&new_id("RSP".into()))
+                .get_referenced_ids_overapproximation(&new_id("RSP".into()))
                 .iter()
                 .next()
                 .unwrap(),
...
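The split into `_overapproximation` and `_underapproximation` variants encodes which direction of error is safe for each caller. A standalone sketch of that contract with a deliberately simplified memory object (illustrative names and types, not this crate's API):

```rust
use std::collections::BTreeSet;

/// Simplified memory object: `Some(id)` is a tracked pointer to object `id`,
/// `None` is a value the analysis lost track of (it might still be a pointer).
struct MemObject {
    slots: Vec<Option<u64>>,
    /// Every object ID that was ever written into this object.
    possible_targets: BTreeSet<u64>,
}

impl MemObject {
    /// May report IDs that are no longer stored: safe for questions like
    /// "could this object still point to X?".
    fn referenced_ids_overapproximation(&self) -> &BTreeSet<u64> {
        &self.possible_targets
    }

    /// May miss targets of untracked pointers: only safe where dropping an
    /// object that is reachable solely through lost pointers is acceptable,
    /// as in the `remove_unreferenced_objects` change in the next file.
    fn referenced_ids_underapproximation(&self) -> BTreeSet<u64> {
        self.slots.iter().flatten().copied().collect()
    }
}

fn main() {
    let obj = MemObject {
        slots: vec![Some(1), None],
        possible_targets: BTreeSet::from([1, 2]),
    };
    assert_eq!(obj.referenced_ids_underapproximation(), BTreeSet::from([1]));
    assert!(obj.referenced_ids_overapproximation().contains(&2));
}
```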
@@ -150,10 +150,8 @@ impl State {
     /// Remove all objects that can no longer be reached by any known pointer.
     /// This does not remove objects where some caller may still know a pointer to the object.
     ///
-    /// Right now it uses the conservative overapproximation of all possible pointer targets contained in a memory object,
-    /// which will sometimes prevent memory objects from being removed
-    /// even if no actual pointer to it can be reconstructed from the state.
-    /// This may change in the future if memory consumption is too high (TODO: measure that).
+    /// The function uses an underapproximation of all possible pointer targets contained in a memory object.
+    /// This keeps the number of tracked objects reasonably small.
     pub fn remove_unreferenced_objects(&mut self) {
         // get all referenced IDs
         let mut referenced_ids = BTreeSet::new();
@@ -163,13 +161,37 @@ impl State {
         referenced_ids.insert(self.stack_id.clone());
         referenced_ids.append(&mut self.caller_stack_ids.clone());
         referenced_ids.append(&mut self.ids_known_to_caller.clone());
-        referenced_ids = self.add_recursively_referenced_ids_to_id_set(referenced_ids);
+        referenced_ids = self.add_directly_reachable_ids_to_id_set(referenced_ids);
         // remove unreferenced objects
         self.memory.remove_unused_objects(&referenced_ids);
     }
     /// Search (recursively) through all memory objects referenced by the given IDs
-    /// and all IDs contained in them to the set of IDs.
+    /// and add all IDs reachable through concrete pointers contained in them to the set of IDs.
+    ///
+    /// This uses an underapproximation of the referenced IDs of a memory object,
+    /// i.e. IDs may be missing if the analysis lost track of the corresponding pointer.
+    pub fn add_directly_reachable_ids_to_id_set(
+        &self,
+        mut ids: BTreeSet<AbstractIdentifier>,
+    ) -> BTreeSet<AbstractIdentifier> {
+        let mut unsearched_ids = ids.clone();
+        while let Some(id) = unsearched_ids.iter().next() {
+            let id = id.clone();
+            unsearched_ids.remove(&id);
+            let memory_ids = self.memory.get_referenced_ids_underapproximation(&id);
+            for mem_id in memory_ids {
+                if ids.get(&mem_id).is_none() {
+                    ids.insert(mem_id.clone());
+                    unsearched_ids.insert(mem_id.clone());
+                }
+            }
+        }
+        ids
+    }
+    /// Search (recursively) through all memory objects referenced by the given IDs
+    /// and add all IDs contained in them to the set of IDs.
     ///
     /// This uses an overapproximation of the referenced IDs of a memory object,
     /// i.e. for a memory object it may add IDs as possible references
@@ -182,9 +204,9 @@ impl State {
         while let Some(id) = unsearched_ids.iter().next() {
             let id = id.clone();
             unsearched_ids.remove(&id);
-            let memory_ids = self.memory.get_referenced_ids(&id);
+            let memory_ids = self.memory.get_referenced_ids_overapproximation(&id);
             for mem_id in memory_ids {
-                if ids.get(mem_id).is_none() {
+                if ids.get(&mem_id).is_none() {
                     ids.insert(mem_id.clone());
                     unsearched_ids.insert(mem_id.clone());
                 }
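Both search functions are the same worklist-driven transitive closure; they differ only in which reference set they consult per object. A generic standalone sketch over plain integer IDs (illustrative, not this crate's API):

```rust
use std::collections::{BTreeMap, BTreeSet};

/// Transitive closure over an ID reference graph: starting from the roots,
/// keep adding every ID referenced by an already reached object until the
/// set stabilizes.
fn reachable_ids(
    roots: BTreeSet<u64>,
    references: &BTreeMap<u64, BTreeSet<u64>>,
) -> BTreeSet<u64> {
    let mut ids = roots;
    let mut unsearched = ids.clone();
    while let Some(id) = unsearched.iter().next().cloned() {
        unsearched.remove(&id);
        for target in references.get(&id).into_iter().flatten() {
            // `insert` returns true only for IDs not seen before.
            if ids.insert(*target) {
                unsearched.insert(*target);
            }
        }
    }
    ids
}

fn main() {
    // 0 -> 1 -> 2, plus an object 3 that is not reachable from the root.
    let references = BTreeMap::from([
        (0, BTreeSet::from([1])),
        (1, BTreeSet::from([2])),
        (3, BTreeSet::from([0])),
    ]);
    assert_eq!(
        reachable_ids(BTreeSet::from([0]), &references),
        BTreeSet::from([0, 1, 2])
    );
}
```

Fed with the underapproximated reference sets, the closure may miss objects reachable only through lost pointers (they get garbage-collected); fed with the overapproximation, it may keep dead objects alive. That trade-off is exactly what the two search functions above encode.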
@@ -264,6 +286,45 @@ impl State {
     pub fn readd_caller_objects(&mut self, caller_state: &State) {
         self.memory.append_unknown_objects(&caller_state.memory);
     }
+    /// Restore the content of callee-saved registers from the caller state.
+    ///
+    /// This function does not check what the callee state currently contains in these registers.
+    /// If the callee does not adhere to the given calling convention, this may introduce analysis errors!
+    /// It will also mask cases
+    /// where a callee-saved register was incorrectly modified (e.g. because of a bug in the callee).
+    pub fn restore_callee_saved_register(
+        &mut self,
+        caller_state: &State,
+        cconv: &CallingConvention,
+    ) {
+        for (register, value) in caller_state.register.iter() {
+            if cconv
+                .callee_saved_register
+                .iter()
+                .any(|reg_name| *reg_name == register.name)
+            {
+                self.set_register(register, value.clone());
+            }
+        }
+    }
+    /// Remove all knowledge about the contents of callee-saved registers from the state.
+    pub fn remove_callee_saved_register(&mut self, cconv: &CallingConvention) {
+        let mut register_to_remove = Vec::new();
+        for register in self.register.keys() {
+            if cconv
+                .callee_saved_register
+                .iter()
+                .any(|reg_name| *reg_name == register.name)
+            {
+                register_to_remove.push(register.clone());
+            }
+        }
+        for register in register_to_remove {
+            self.register.remove(&register);
+        }
+    }
 }
 impl AbstractDomain for State {
...
@@ -329,3 +329,80 @@ fn merge_callee_stack_to_caller_stack() {
     );
     assert_eq!(state.memory.get_all_object_ids().len(), 1);
 }
+#[test]
+fn remove_and_restore_callee_saved_register() {
+    let mut state = State::new(&register("RSP"), Tid::new("func_tid"));
+    let value: Data = Bitvector::from_u64(42).into();
+    let cconv = CallingConvention::mock();
+    state.set_register(&register("RBP"), value.clone());
+    state.set_register(&register("RAX"), value.clone());
+    let mut callee_state = state.clone();
+    callee_state.remove_callee_saved_register(&cconv);
+    assert_eq!(
+        callee_state.get_register(&register("RBP")).unwrap(),
+        Data::new_top(ByteSize::new(8))
+    );
+    assert_eq!(
+        callee_state.get_register(&register("RAX")).unwrap(),
+        value.clone()
+    );
+    let other_value: Data = Bitvector::from_u64(13).into();
+    callee_state.set_register(&register("RAX"), other_value.clone());
+    callee_state.restore_callee_saved_register(&state, &cconv);
+    assert_eq!(callee_state.get_register(&register("RBP")).unwrap(), value);
+    assert_eq!(
+        callee_state.get_register(&register("RAX")).unwrap(),
+        other_value
+    );
+}
+#[test]
+fn reachable_ids_under_and_overapproximation() {
+    let mut state = State::new(&register("RSP"), Tid::new("func_tid"));
+    let stack_id = new_id("func_tid", "RSP");
+    let heap_id = new_id("heap_obj", "RAX");
+    let stack_address: Data =
+        PointerDomain::new(stack_id.clone(), Bitvector::from_i64(-8).into()).into();
+    let heap_address: Data =
+        PointerDomain::new(heap_id.clone(), Bitvector::from_i64(0).into()).into();
+    // Add the heap object to the state, so that it can be recursively searched.
+    state.memory.add_abstract_object(
+        heap_id.clone(),
+        Bitvector::from_i64(0).into(),
+        crate::analysis::pointer_inference::object::ObjectType::Heap,
+        ByteSize::new(8),
+    );
+    state.store_value(&stack_address, &heap_address).unwrap();
+    let reachable_ids: BTreeSet<AbstractIdentifier> = vec![stack_id.clone()].into_iter().collect();
+    assert_eq!(
+        state.add_directly_reachable_ids_to_id_set(reachable_ids.clone()),
+        vec![stack_id.clone(), heap_id.clone()]
+            .into_iter()
+            .collect()
+    );
+    assert_eq!(
+        state.add_recursively_referenced_ids_to_id_set(reachable_ids.clone()),
+        vec![stack_id.clone(), heap_id.clone()]
+            .into_iter()
+            .collect()
+    );
+    let _ = state.store_value(
+        &PointerDomain::new(stack_id.clone(), BitvectorDomain::new_top(ByteSize::new(8))).into(),
+        &Data::Value(Bitvector::from_i64(42).into()),
+    );
+    assert_eq!(
+        state.add_directly_reachable_ids_to_id_set(reachable_ids.clone()),
+        vec![stack_id.clone()].into_iter().collect()
+    );
+    assert_eq!(
+        state.add_recursively_referenced_ids_to_id_set(reachable_ids.clone()),
+        vec![stack_id.clone(), heap_id.clone()]
+            .into_iter()
+            .collect()
+    );
+}
@@ -478,6 +478,17 @@ mod tests {
     }
 }
+impl CallingConvention {
+    pub fn mock() -> CallingConvention {
+        CallingConvention {
+            name: "__stdcall".to_string(),
+            parameter_register: vec!["RDI".into()],
+            callee_saved_register: vec!["RBP".into()],
+            return_register: vec!["RAX".into()],
+        }
+    }
+}
 impl Project {
     pub fn mock_empty() -> Project {
         Project {
...