Skip to content
Projects
Groups
Snippets
Help
This project
Loading...
Sign in / Register
Toggle navigation
C
cwe_checker
Overview
Overview
Details
Activity
Cycle Analytics
Repository
Repository
Files
Commits
Branches
Tags
Contributors
Graph
Compare
Charts
Issues
0
Issues
0
List
Board
Labels
Milestones
Merge Requests
0
Merge Requests
0
CI / CD
CI / CD
Pipelines
Jobs
Schedules
Charts
Wiki
Wiki
Snippets
Snippets
Members
Members
Collapse sidebar
Close sidebar
Activity
Graph
Charts
Create a new issue
Jobs
Commits
Issue Boards
Open sidebar
fact-depend
cwe_checker
Commits
05843314
Unverified
Commit
05843314
authored
Sep 06, 2021
by
Enkelmann
Committed by
GitHub
Sep 06, 2021
Browse files
Options
Browse Files
Download
Email Patches
Plain Diff
Refactor AbstractObject and AbstractObjectList for usage with new DataDomain (#223)
parent
323d070e
Show whitespace changes
Inline
Side-by-side
Showing
13 changed files
with
1245 additions
and
1177 deletions
+1245
-1177
mem_region.rs
src/cwe_checker_lib/src/abstract_domain/mem_region.rs
+15
-19
object.rs
src/cwe_checker_lib/src/analysis/pointer_inference/object.rs
+0
-558
id_manipulation.rs
.../src/analysis/pointer_inference/object/id_manipulation.rs
+59
-0
mod.rs
..._checker_lib/src/analysis/pointer_inference/object/mod.rs
+298
-0
tests.rs
...hecker_lib/src/analysis/pointer_inference/object/tests.rs
+145
-0
value_access.rs
...lib/src/analysis/pointer_inference/object/value_access.rs
+107
-0
object_list.rs
...checker_lib/src/analysis/pointer_inference/object_list.rs
+0
-599
cwe_helpers.rs
...src/analysis/pointer_inference/object_list/cwe_helpers.rs
+137
-0
id_manipulation.rs
...analysis/pointer_inference/object_list/id_manipulation.rs
+55
-0
list_manipulation.rs
...alysis/pointer_inference/object_list/list_manipulation.rs
+79
-0
mod.rs
...ker_lib/src/analysis/pointer_inference/object_list/mod.rs
+190
-0
tests.rs
...r_lib/src/analysis/pointer_inference/object_list/tests.rs
+157
-0
tests.rs
...checker_lib/src/analysis/pointer_inference/state/tests.rs
+3
-1
No files found.
src/cwe_checker_lib/src/abstract_domain/mem_region.rs
View file @
05843314
...
...
@@ -110,8 +110,10 @@ impl<T: AbstractDomain + SizedDomain + HasTop + std::fmt::Debug> MemRegion<T> {
/// Clear all values that might be fully or partially overwritten if one writes a value with byte size `value_size`
/// to an offset contained in the interval from `start` to `end` (both bounds included in the interval).
///
/// This represents the effect of writing an arbitrary value (with known byte size)
/// to an arbitrary offset contained in the interval.
/// This represents the effect of writing arbitrary values (with known byte size)
/// to arbitrary offsets contained in the interval.
/// Note that if one only wants to mark values in the interval as potentially overwritten without deleting them,
/// then one should use the [`MemRegion::mark_interval_values_as_top`] method instead.
pub
fn
clear_offset_interval
(
&
mut
self
,
start
:
i64
,
end
:
i64
,
value_size
:
ByteSize
)
{
let
size
=
end
-
start
+
(
u64
::
from
(
value_size
)
as
i64
);
self
.clear_interval
(
start
,
size
);
...
...
@@ -247,6 +249,16 @@ impl<T: AbstractDomain + SizedDomain + HasTop + std::fmt::Debug> MemRegion<T> {
}
}
/// Emulate a write operation to an unknown offset by merging all values with `Top`
/// to indicate that they may have been overwritten
pub fn mark_all_values_as_top(&mut self) {
    let inner = Arc::make_mut(&mut self.inner);
    inner
        .values
        .values_mut()
        .for_each(|val| *val = val.merge(&val.top()));
    // Merging with *Top* may have produced values that are now *Top* themselves;
    // those must not remain in the internal representation.
    self.clear_top_values();
}
/// Merge two memory regions.
///
/// Values at the same position and with the same size get merged via their merge function.
...
...
@@ -323,24 +335,8 @@ impl<T: AbstractDomain + SizedDomain + HasTop + std::fmt::Debug> MemRegion<T> {
/// Remove all values representing the *Top* element from the internal value store,
/// as these should not be saved in the internal representation.
pub fn clear_top_values(&mut self) {
    let inner = Arc::make_mut(&mut self.inner);
    // Retain only non-*Top* values; `retain` removes the rest in place.
    inner.values.retain(|_key, value| !value.is_top());
}
}
...
...
src/cwe_checker_lib/src/analysis/pointer_inference/object.rs
deleted
100644 → 0
View file @
323d070e
//! This module contains the definition of the abstract memory object type.
use
super
::{
Data
,
ValueDomain
};
use
crate
::
abstract_domain
::
*
;
use
crate
::
prelude
::
*
;
use
derive_more
::
Deref
;
use
serde
::{
Deserialize
,
Serialize
};
use
std
::
collections
::
BTreeSet
;
use
std
::
ops
::
DerefMut
;
use
std
::
sync
::
Arc
;
/// A wrapper struct wrapping `AbstractObjectInfo` in an `Arc`.
///
/// The `Arc` makes cloning an abstract object cheap (a reference-count bump).
/// The derived `Deref` is forwarded through the `Arc`, so `AbstractObjectInfo`
/// methods can be called directly on an `AbstractObject`.
#[derive(Serialize, Deserialize, Debug, PartialEq, Eq, Clone, Deref)]
#[deref(forward)]
pub struct AbstractObject(Arc<AbstractObjectInfo>);
impl DerefMut for AbstractObject {
    /// Get mutable access to the wrapped `AbstractObjectInfo`.
    ///
    /// `Arc::make_mut` clones the inner value first if the `Arc` is shared,
    /// so mutation never affects other clones of this object (copy-on-write).
    fn deref_mut(&mut self) -> &mut AbstractObjectInfo {
        Arc::make_mut(&mut self.0)
    }
}
impl AbstractObject {
    /// Create a new abstract object with given object type and address bitsize.
    pub fn new(type_: ObjectType, address_bytesize: ByteSize) -> AbstractObject {
        let info = AbstractObjectInfo::new(type_, address_bytesize);
        AbstractObject(Arc::new(info))
    }

    /// Short-circuits the `AbstractObjectInfo::merge` function if `self==other`.
    pub fn merge(&self, other: &Self) -> Self {
        if self == other {
            // Equal objects merge to themselves; skip the expensive field-wise merge.
            return self.clone();
        }
        AbstractObject(Arc::new(self.0.merge(other)))
    }
}
/// The abstract object info contains all information that we track for an abstract object.
#[derive(Serialize, Deserialize, Debug, PartialEq, Eq, Clone)]
pub struct AbstractObjectInfo {
    /// An upper approximation of all possible targets for which pointers may exist inside the memory region.
    pointer_targets: BTreeSet<AbstractIdentifier>,
    /// Tracks whether this may represent more than one actual memory object.
    pub is_unique: bool,
    /// Is the object alive or already destroyed
    state: ObjectState,
    /// Is the object a stack frame or a heap object
    /// (`None` if the type is unknown, e.g. after merging objects of different type).
    type_: Option<ObjectType>,
    /// The actual content of the memory object
    memory: MemRegion<Data>,
    /// The smallest index still contained in the memory region.
    /// A `Top` value represents an unknown bound.
    /// The bound is not enforced, i.e. reading and writing to indices violating the bound is still allowed.
    lower_index_bound: BitvectorDomain,
    /// The largest index still contained in the memory region.
    /// A `Top` value represents an unknown bound.
    /// The bound is not enforced, i.e. reading and writing to indices violating the bound is still allowed.
    upper_index_bound: BitvectorDomain,
}
impl AbstractObjectInfo {
    /// Create a new abstract object with known object type and address bitsize
    pub fn new(type_: ObjectType, address_bytesize: ByteSize) -> AbstractObjectInfo {
        AbstractObjectInfo {
            pointer_targets: BTreeSet::new(),
            is_unique: true,
            state: ObjectState::Alive,
            type_: Some(type_),
            memory: MemRegion::new(address_bytesize),
            // Index bounds start out unknown (`Top`); they can be narrowed later
            // via the `set_*_index_bound` methods.
            lower_index_bound: BitvectorDomain::Top(address_bytesize),
            upper_index_bound: BitvectorDomain::Top(address_bytesize),
        }
    }

    /// Set the lower index bound that is still considered to be contained in the abstract object.
    pub fn set_lower_index_bound(&mut self, lower_bound: BitvectorDomain) {
        self.lower_index_bound = lower_bound;
    }

    /// Set the upper index bound that is still considered to be contained in the abstract object.
    pub fn set_upper_index_bound(&mut self, upper_bound: BitvectorDomain) {
        self.upper_index_bound = upper_bound;
    }

    /// Check whether a memory access to the abstract object at the given offset
    /// and with the given size of the accessed value is contained in the bounds of the memory object.
    /// If `offset` contains more than one possible index value,
    /// then only return `true` if the access is contained in the abstract object for all possible offset values.
    pub fn access_contained_in_bounds(&self, offset: &ValueDomain, size: ByteSize) -> bool {
        if let Ok(offset_interval) = offset.try_to_interval() {
            // An unknown (Top) lower bound imposes no restriction.
            if let Ok(lower_bound) = self.lower_index_bound.try_to_bitvec() {
                // NOTE(review): `checked_sgt` is unwrapped — presumably bounds and
                // offsets always share the same bytesize here; confirm at call sites.
                if lower_bound.checked_sgt(&offset_interval.start).unwrap() {
                    return false;
                }
            }
            if let Ok(upper_bound) = self.upper_index_bound.try_to_bitvec() {
                // Bring the access size to the same bitwidth as the offset
                // before doing signed arithmetic on the two.
                let mut size_as_bitvec = Bitvector::from_u64(u64::from(size));
                match offset.bytesize().cmp(&size_as_bitvec.bytesize()) {
                    std::cmp::Ordering::Less => size_as_bitvec.truncate(offset.bytesize()).unwrap(),
                    std::cmp::Ordering::Greater => {
                        size_as_bitvec.sign_extend(offset.bytesize()).unwrap()
                    }
                    std::cmp::Ordering::Equal => (),
                }
                // Largest byte index touched by the access: offset_end + size - 1.
                let max_index = if let Some(val) = offset_interval
                    .end
                    .signed_add_overflow_checked(&size_as_bitvec)
                {
                    val - &Bitvector::one(offset.bytesize().into())
                } else {
                    return false; // The max index already causes an integer overflow
                };
                if upper_bound.checked_slt(&max_index).unwrap() {
                    return false;
                }
            }
            true
        } else {
            // The offset could not be represented as an interval at all.
            false
        }
    }

    /// Read the value at the given offset of the given size (in bits, not bytes) inside the memory region.
    pub fn get_value(&self, offset: Bitvector, bytesize: ByteSize) -> Data {
        self.memory.get(offset, bytesize)
    }

    /// Write a value at the given offset to the memory region.
    ///
    /// If the abstract object is not unique (i.e. may represent more than one actual object),
    /// merge the old value at the given offset with the new value.
    pub fn set_value(&mut self, value: Data, offset: &ValueDomain) -> Result<(), Error> {
        // Any pointer contained in the written value becomes a possible pointer target.
        self.pointer_targets.extend(value.referenced_ids().cloned());
        if let Ok(concrete_offset) = offset.try_to_bitvec() {
            if self.is_unique {
                // Strong update: the write definitely hits this single object.
                self.memory.add(value, concrete_offset);
            } else {
                // Weak update: the write may hit only one of several represented objects.
                let merged_value = self
                    .memory
                    .get(concrete_offset.clone(), value.bytesize())
                    .merge(&value);
                self.memory.add(merged_value, concrete_offset);
            };
        } else if let Ok((start, end)) = offset.try_to_offset_interval() {
            // Offset only known as an interval: everything in range may be overwritten.
            self.memory
                .clear_offset_interval(start, end, value.bytesize());
        } else {
            // Completely unknown offset: all knowledge about the memory content is lost.
            self.memory = MemRegion::new(self.memory.get_address_bytesize());
        }
        Ok(())
    }

    /// Merge `value` at position `offset` with the value currently saved at that position.
    pub fn merge_value(&mut self, value: Data, offset: &ValueDomain) {
        self.pointer_targets.extend(value.referenced_ids().cloned());
        if let Ok(concrete_offset) = offset.try_to_bitvec() {
            let merged_value = self
                .memory
                .get(concrete_offset.clone(), value.bytesize())
                .merge(&value);
            self.memory.add(merged_value, concrete_offset);
        } else if let Ok((start, end)) = offset.try_to_offset_interval() {
            self.memory
                .clear_offset_interval(start, end, value.bytesize());
        } else {
            self.memory = MemRegion::new(self.memory.get_address_bytesize());
        }
    }

    /// Get all abstract IDs that the object may contain pointers to.
    /// This yields an overapproximation of possible pointer targets.
    pub fn get_referenced_ids_overapproximation(&self) -> &BTreeSet<AbstractIdentifier> {
        &self.pointer_targets
    }

    /// Get all abstract IDs for which the object contains pointers to.
    /// This yields an underapproximation of pointer targets,
    /// since the object may contain pointers that could not be tracked by the analysis.
    pub fn get_referenced_ids_underapproximation(&self) -> BTreeSet<AbstractIdentifier> {
        let mut referenced_ids = BTreeSet::new();
        for data in self.memory.values() {
            referenced_ids.extend(data.referenced_ids().cloned())
        }
        referenced_ids
    }

    /// For pointer values replace an abstract identifier with another one and add the offset_adjustment to the pointer offsets.
    /// This is needed to adjust stack pointers on call and return instructions.
    pub fn replace_abstract_id(
        &mut self,
        old_id: &AbstractIdentifier,
        new_id: &AbstractIdentifier,
        offset_adjustment: &ValueDomain,
    ) {
        for elem in self.memory.values_mut() {
            elem.replace_abstract_id(old_id, new_id, offset_adjustment);
        }
        // The replacement may have degraded some values to *Top*; drop those.
        self.memory.clear_top_values();
        if self.pointer_targets.get(old_id).is_some() {
            self.pointer_targets.remove(old_id);
            self.pointer_targets.insert(new_id.clone());
        }
    }

    /// If `self.is_unique==true`, set the state of the object. Else merge the new state with the old.
    pub fn set_state(&mut self, new_state: ObjectState) {
        if self.is_unique {
            self.state = new_state;
        } else {
            // Non-unique objects: the new state only certainly applies to one of
            // the represented objects, so merge instead of overwriting.
            self.state = self.state.merge(new_state);
        }
    }

    /// Remove the provided IDs from the target lists of all pointers in the memory object.
    /// Also remove them from the pointer_targets list.
    ///
    /// If this operation would produce an empty value, it replaces it with a `Top` value instead.
    pub fn remove_ids(&mut self, ids_to_remove: &BTreeSet<AbstractIdentifier>) {
        self.pointer_targets = self
            .pointer_targets
            .difference(ids_to_remove)
            .cloned()
            .collect();
        for value in self.memory.values_mut() {
            value.remove_ids(ids_to_remove);
            if value.is_empty() {
                *value = value.top();
            }
        }
        self.memory.clear_top_values(); // In case the previous operation left *Top* values in the memory struct.
    }

    /// Get the state of the memory object.
    pub fn get_state(&self) -> ObjectState {
        self.state
    }

    /// Get the type of the memory object.
    pub fn get_object_type(&self) -> Option<ObjectType> {
        self.type_
    }

    /// Invalidates all memory and adds the `additional_targets` to the pointer targets.
    /// Represents the effect of unknown write instructions to the object
    /// which may include writing pointers to targets from the `additional_targets` set to the object.
    pub fn assume_arbitrary_writes(&mut self, additional_targets: &BTreeSet<AbstractIdentifier>) {
        self.memory = MemRegion::new(self.memory.get_address_bytesize());
        self.pointer_targets
            .extend(additional_targets.iter().cloned());
    }

    /// Mark the memory object as freed.
    /// Returns an error if a possible double free is detected
    /// or the memory object may not be a heap object.
    pub fn mark_as_freed(&mut self) -> Result<(), Error> {
        if self.type_ != Some(ObjectType::Heap) {
            self.set_state(ObjectState::Flagged);
            return Err(anyhow!("Free operation on possibly non-heap memory object"));
        }
        match (self.is_unique, self.state) {
            // Unique object that is (or was already flagged as) alive: now dangling.
            (true, ObjectState::Alive) | (true, ObjectState::Flagged) => {
                self.state = ObjectState::Dangling;
                Ok(())
            }
            // Non-unique flagged object: suppress a repeated warning, state unknown.
            (false, ObjectState::Flagged) => {
                self.state = ObjectState::Unknown;
                Ok(())
            }
            // Possible double free: flag the object to avoid cascading warnings.
            (true, _) | (false, ObjectState::Dangling) => {
                self.state = ObjectState::Flagged;
                Err(anyhow!("Object may already have been freed"))
            }
            // Non-unique object: only one of the represented objects was freed.
            (false, _) => {
                self.state = ObjectState::Unknown;
                Ok(())
            }
        }
    }

    /// Mark the memory object as possibly (but not definitely) freed.
    /// Returns an error if the object was definitely freed before
    /// or if the object may not be a heap object.
    pub fn mark_as_maybe_freed(&mut self) -> Result<(), Error> {
        if self.type_ != Some(ObjectType::Heap) {
            self.set_state(ObjectState::Flagged);
            return Err(anyhow!("Free operation on possibly non-heap memory object"));
        }
        match self.state {
            ObjectState::Dangling => {
                self.state = ObjectState::Flagged;
                Err(anyhow!("Object may already have been freed"))
            }
            _ => {
                self.state = ObjectState::Unknown;
                Ok(())
            }
        }
    }
}
impl AbstractDomain for AbstractObjectInfo {
    /// Merge two abstract objects
    fn merge(&self, other: &Self) -> Self {
        AbstractObjectInfo {
            // Pointer targets are an overapproximation, so merging takes the union.
            pointer_targets: self
                .pointer_targets
                .union(&other.pointer_targets)
                .cloned()
                .collect(),
            // The merge result is only unique if both sources were unique.
            is_unique: self.is_unique && other.is_unique,
            state: self.state.merge(other.state),
            // Differing object types merge to `None` (unknown type).
            type_: same_or_none(&self.type_, &other.type_),
            memory: self.memory.merge(&other.memory),
            lower_index_bound: self.lower_index_bound.merge(&other.lower_index_bound),
            upper_index_bound: self.upper_index_bound.merge(&other.upper_index_bound),
        }
    }

    /// The domain has no *Top* element, thus this function always returns false.
    fn is_top(&self) -> bool {
        false
    }
}
impl AbstractObjectInfo {
    /// Get a more compact json-representation of the abstract object.
    /// Intended for pretty printing, not useable for serialization/deserialization.
    pub fn to_json_compact(&self) -> serde_json::Value {
        // Scalar fields are rendered as display/debug strings.
        let mut elements = vec![
            (
                "is_unique".to_string(),
                serde_json::Value::String(format!("{}", self.is_unique)),
            ),
            (
                "state".to_string(),
                serde_json::Value::String(format!("{:?}", self.state)),
            ),
            (
                "type".to_string(),
                serde_json::Value::String(format!("{:?}", self.type_)),
            ),
            (
                "lower_index_bound".to_string(),
                serde_json::Value::String(format!("{}", self.lower_index_bound)),
            ),
            (
                "upper_index_bound".to_string(),
                serde_json::Value::String(format!("{}", self.upper_index_bound)),
            ),
        ];
        // The memory content becomes a nested JSON object keyed by offset.
        let memory = self
            .memory
            .iter()
            .map(|(index, value)| (format!("{}", index), value.to_json_compact()));
        elements.push((
            "memory".to_string(),
            serde_json::Value::Object(memory.collect()),
        ));
        serde_json::Value::Object(elements.into_iter().collect())
    }
}
/// Helper function for merging two `Option<T>` values (merging to `None` if they are not equal).
fn same_or_none<T: Eq + Clone>(left: &Option<T>, right: &Option<T>) -> Option<T> {
    match (left, right) {
        // Only two equal `Some` values survive the merge.
        (Some(l), Some(r)) if l == r => Some(l.clone()),
        _ => None,
    }
}
/// An object is either a stack or a heap object.
#[derive(Serialize, Deserialize, Debug, PartialEq, Eq, Hash, Clone, Copy, PartialOrd, Ord)]
pub enum ObjectType {
    /// A stack object, i.e. the stack frame of a function.
    Stack,
    /// A memory object located on the heap.
    Heap,
}
/// An object is either alive or dangling (because the memory was freed or a function return invalidated the stack frame).
#[derive(Serialize, Deserialize, Debug, PartialEq, Eq, Hash, Clone, Copy, PartialOrd, Ord)]
pub enum ObjectState {
    /// The object is alive.
    Alive,
    /// The object is dangling, i.e. the memory has been freed already.
    Dangling,
    /// The state of the object is unknown (due to merging different object states).
    Unknown,
    /// The object was referenced in an "use-after-free" or "double-free" CWE-warning.
    /// This state is meant to be temporary to prevent obvious subsequent CWE-warnings with the same root cause.
    Flagged,
}
impl ObjectState {
    /// Merge two object states.
    /// If one of the two states is `Flagged`, then the resulting state is the other object state.
    pub fn merge(self, other: Self) -> Self {
        use ObjectState::*;
        match (self, other) {
            // `Flagged` is transparent: it only suppresses follow-up warnings.
            (Flagged, state) | (state, Flagged) => state,
            // Any remaining uncertainty is contagious.
            (Unknown, _) | (_, Unknown) => Unknown,
            // Two identical definite states stay definite.
            (left, right) if left == right => left,
            // Alive merged with Dangling: we no longer know whether the object lives.
            _ => Unknown,
        }
    }
}
#[cfg(test)]
mod
tests
{
use
super
::
*
;
fn
new_abstract_object
()
->
AbstractObject
{
let
obj_info
=
AbstractObjectInfo
{
pointer_targets
:
BTreeSet
::
new
(),
is_unique
:
true
,
state
:
ObjectState
::
Alive
,
type_
:
Some
(
ObjectType
::
Heap
),
memory
:
MemRegion
::
new
(
ByteSize
::
new
(
8
)),
lower_index_bound
:
Bitvector
::
from_u64
(
0
)
.into
(),
upper_index_bound
:
Bitvector
::
from_u64
(
99
)
.into
(),
};
AbstractObject
(
Arc
::
new
(
obj_info
))
}
fn
new_data
(
number
:
i64
)
->
Data
{
bv
(
number
)
.into
()
}
fn
bv
(
number
:
i64
)
->
ValueDomain
{
ValueDomain
::
from
(
Bitvector
::
from_i64
(
number
))
}
fn
new_id
(
tid
:
&
str
,
reg_name
:
&
str
)
->
AbstractIdentifier
{
AbstractIdentifier
::
new
(
Tid
::
new
(
tid
),
AbstractLocation
::
Register
(
reg_name
.into
(),
ByteSize
::
new
(
8
)),
)
}
#[test]
fn
abstract_object
()
{
let
mut
object
=
new_abstract_object
();
let
three
=
new_data
(
3
);
let
offset
=
bv
(
-
15
);
object
.set_value
(
three
,
&
offset
)
.unwrap
();
assert_eq!
(
object
.get_value
(
Bitvector
::
from_i64
(
-
16
),
ByteSize
::
new
(
8
)),
Data
::
new_top
(
ByteSize
::
new
(
8
))
);
assert_eq!
(
object
.get_value
(
Bitvector
::
from_i64
(
-
15
),
ByteSize
::
new
(
8
)),
new_data
(
3
)
);
object
.set_value
(
new_data
(
4
),
&
bv
(
-
12
))
.unwrap
();
assert_eq!
(
object
.get_value
(
Bitvector
::
from_i64
(
-
15
),
ByteSize
::
new
(
8
)),
Data
::
new_top
(
ByteSize
::
new
(
8
))
);
object
.merge_value
(
new_data
(
23
),
&
bv
(
-
12
));
assert_eq!
(
object
.get_value
(
Bitvector
::
from_i64
(
-
12
),
ByteSize
::
new
(
8
)),
IntervalDomain
::
mock
(
4
,
23
)
.with_stride
(
19
)
.into
()
);
let
mut
other_object
=
new_abstract_object
();
object
.set_value
(
new_data
(
0
),
&
bv
(
0
))
.unwrap
();
other_object
.set_value
(
new_data
(
0
),
&
bv
(
0
))
.unwrap
();
let
merged_object
=
object
.merge
(
&
other_object
);
assert_eq!
(
merged_object
.get_value
(
Bitvector
::
from_i64
(
-
12
),
ByteSize
::
new
(
8
))
.get_absolute_value
(),
Some
(
&
IntervalDomain
::
mock
(
4
,
23
)
.with_stride
(
19
)
.into
())
);
assert
!
(
merged_object
.get_value
(
Bitvector
::
from_i64
(
-
12
),
ByteSize
::
new
(
8
))
.contains_top
());
assert_eq!
(
merged_object
.get_value
(
Bitvector
::
from_i64
(
0
),
ByteSize
::
new
(
8
)),
new_data
(
0
)
);
}
#[test]
fn
replace_id
()
{
use
std
::
collections
::
BTreeMap
;
let
mut
object
=
new_abstract_object
();
let
mut
target_map
=
BTreeMap
::
new
();
target_map
.insert
(
new_id
(
"time_1"
,
"RAX"
),
bv
(
20
));
target_map
.insert
(
new_id
(
"time_234"
,
"RAX"
),
bv
(
30
));
target_map
.insert
(
new_id
(
"time_1"
,
"RBX"
),
bv
(
40
));
let
pointer
=
DataDomain
::
mock_from_target_map
(
target_map
.clone
());
object
.set_value
(
pointer
,
&
bv
(
-
15
))
.unwrap
();
assert_eq!
(
object
.get_referenced_ids_overapproximation
()
.len
(),
3
);
object
.replace_abstract_id
(
&
new_id
(
"time_1"
,
"RAX"
),
&
new_id
(
"time_234"
,
"RAX"
),
&
bv
(
10
),
);
target_map
.remove
(
&
new_id
(
"time_1"
,
"RAX"
));
let
modified_pointer
=
DataDomain
::
mock_from_target_map
(
target_map
);
assert_eq!
(
object
.get_value
(
Bitvector
::
from_i64
(
-
15
),
ByteSize
::
new
(
8
)),
modified_pointer
);
object
.replace_abstract_id
(
&
new_id
(
"time_1"
,
"RBX"
),
&
new_id
(
"time_234"
,
"RBX"
),
&
bv
(
10
),
);
let
mut
target_map
=
BTreeMap
::
new
();
target_map
.insert
(
new_id
(
"time_234"
,
"RAX"
),
bv
(
30
));
target_map
.insert
(
new_id
(
"time_234"
,
"RBX"
),
bv
(
50
));
let
modified_pointer
=
DataDomain
::
mock_from_target_map
(
target_map
);
assert_eq!
(
object
.get_value
(
Bitvector
::
from_i64
(
-
15
),
ByteSize
::
new
(
8
)),
modified_pointer
);
}
#[test]
fn
remove_ids
()
{
use
std
::
collections
::
BTreeMap
;
let
mut
object
=
new_abstract_object
();
let
mut
target_map
=
BTreeMap
::
new
();
target_map
.insert
(
new_id
(
"time_1"
,
"RAX"
),
bv
(
20
));
target_map
.insert
(
new_id
(
"time_234"
,
"RAX"
),
bv
(
30
));
target_map
.insert
(
new_id
(
"time_1"
,
"RBX"
),
bv
(
40
));
let
pointer
=
DataDomain
::
mock_from_target_map
(
target_map
.clone
());
object
.set_value
(
pointer
,
&
bv
(
-
15
))
.unwrap
();
assert_eq!
(
object
.get_referenced_ids_overapproximation
()
.len
(),
3
);
let
ids_to_remove
=
vec!
[
new_id
(
"time_1"
,
"RAX"
),
new_id
(
"time_23"
,
"RBX"
)]
.into_iter
()
.collect
();
object
.remove_ids
(
&
ids_to_remove
);
assert_eq!
(
object
.get_referenced_ids_overapproximation
(),
&
vec!
[
new_id
(
"time_234"
,
"RAX"
),
new_id
(
"time_1"
,
"RBX"
)]
.into_iter
()
.collect
()
);
}
#[test]
fn
access_contained_in_bounds
()
{
let
object
=
new_abstract_object
();
assert
!
(
object
.access_contained_in_bounds
(
&
IntervalDomain
::
mock
(
0
,
99
),
ByteSize
::
new
(
1
)));
assert
!
(
!
object
.access_contained_in_bounds
(
&
IntervalDomain
::
mock
(
-
1
,
-
1
),
ByteSize
::
new
(
8
)));
assert
!
(
object
.access_contained_in_bounds
(
&
IntervalDomain
::
mock
(
92
,
92
),
ByteSize
::
new
(
8
)));
assert
!
(
!
object
.access_contained_in_bounds
(
&
IntervalDomain
::
mock
(
93
,
93
),
ByteSize
::
new
(
8
)));
}
}
src/cwe_checker_lib/src/analysis/pointer_inference/object/id_manipulation.rs
0 → 100644
View file @
05843314
use
super
::
*
;
impl AbstractObject {
    /// Get all abstract IDs that the object may contain pointers to.
    /// This yields an overapproximation of possible pointer targets.
    pub fn get_referenced_ids_overapproximation(&self) -> &BTreeSet<AbstractIdentifier> {
        &self.inner.pointer_targets
    }

    /// Get all abstract IDs for which the object contains pointers to.
    /// This yields an underapproximation of pointer targets,
    /// since the object may contain pointers that could not be tracked by the analysis.
    pub fn get_referenced_ids_underapproximation(&self) -> BTreeSet<AbstractIdentifier> {
        let mut referenced_ids = BTreeSet::new();
        for data in self.inner.memory.values() {
            referenced_ids.extend(data.referenced_ids().cloned())
        }
        referenced_ids
    }

    /// For pointer values replace an abstract identifier with another one and add the offset_adjustment to the pointer offsets.
    /// This is needed to adjust stack pointers on call and return instructions.
    pub fn replace_abstract_id(
        &mut self,
        old_id: &AbstractIdentifier,
        new_id: &AbstractIdentifier,
        offset_adjustment: &ValueDomain,
    ) {
        // Copy-on-write access to the shared inner data.
        let inner = Arc::make_mut(&mut self.inner);
        for elem in inner.memory.values_mut() {
            elem.replace_abstract_id(old_id, new_id, offset_adjustment);
        }
        // The replacement may have degraded some values to *Top*; drop those.
        inner.memory.clear_top_values();
        // `BTreeSet::remove` returns whether the ID was present,
        // so no separate `get(..).is_some()` lookup is needed.
        if inner.pointer_targets.remove(old_id) {
            inner.pointer_targets.insert(new_id.clone());
        }
    }

    /// Remove the provided IDs from the target lists of all pointers in the memory object.
    /// Also remove them from the pointer_targets list.
    ///
    /// If this operation would produce an empty value, it replaces it with a `Top` value instead.
    pub fn remove_ids(&mut self, ids_to_remove: &BTreeSet<AbstractIdentifier>) {
        let inner = Arc::make_mut(&mut self.inner);
        // Retain in place instead of rebuilding the whole set
        // via `difference(..).cloned().collect()`.
        inner
            .pointer_targets
            .retain(|id| !ids_to_remove.contains(id));
        for value in inner.memory.values_mut() {
            value.remove_ids(ids_to_remove);
            if value.is_empty() {
                *value = value.top();
            }
        }
        inner.memory.clear_top_values(); // In case the previous operation left *Top* values in the memory struct.
    }
}
src/cwe_checker_lib/src/analysis/pointer_inference/object/mod.rs
0 → 100644
View file @
05843314
//! This module contains the definition of the abstract memory object type.
use
super
::{
Data
,
ValueDomain
};
use
crate
::
abstract_domain
::
*
;
use
crate
::
prelude
::
*
;
use
serde
::{
Deserialize
,
Serialize
};
use
std
::
collections
::
BTreeSet
;
use
std
::
sync
::
Arc
;
/// Methods for manipulating abstract IDs contained in an abstract object.
mod
id_manipulation
;
/// Methods for handling read/write operations on an abstract object.
mod
value_access
;
/// An abstract object contains all knowledge tracked about a particular memory object.
///
/// In some cases one abstract object can represent more than one actual memory object.
/// This happens for e.g. several memory objects allocated into an array,
/// since we cannot represent every object separately without knowing the exact number of objects
/// (which may be runtime dependent).
///
/// To allow cheap cloning of abstract objects, the actual data is wrapped in an `Arc`.
///
/// Examples of memory objects:
/// * The stack frame of a function
/// * A memory object allocated on the heap
#[derive(Serialize, Deserialize, Debug, PartialEq, Eq, Clone)]
pub struct AbstractObject {
    // The shared object data; mutating methods use `Arc::make_mut` for copy-on-write.
    inner: Arc<Inner>,
}
/// The abstract object info contains all information that we track for an abstract object.
#[derive(Serialize, Deserialize, Debug, PartialEq, Eq, Clone)]
struct Inner {
    /// An upper approximation of all possible targets for which pointers may exist inside the memory region.
    pointer_targets: BTreeSet<AbstractIdentifier>,
    /// Tracks whether this may represent more than one actual memory object.
    is_unique: bool,
    /// Is the object alive or already destroyed
    state: ObjectState,
    /// Is the object a stack frame or a heap object
    /// (`None` if the type is unknown, e.g. after merging objects of different type).
    type_: Option<ObjectType>,
    /// The actual content of the memory object
    memory: MemRegion<Data>,
    /// The smallest index still contained in the memory region.
    /// A `Top` value represents an unknown bound.
    /// The bound is not enforced, i.e. reading and writing to indices violating the bound is still allowed.
    lower_index_bound: BitvectorDomain,
    /// The largest index still contained in the memory region.
    /// A `Top` value represents an unknown bound.
    /// The bound is not enforced, i.e. reading and writing to indices violating the bound is still allowed.
    upper_index_bound: BitvectorDomain,
}
/// An object is either a stack or a heap object.
#[derive(Serialize, Deserialize, Debug, PartialEq, Eq, Hash, Clone, Copy, PartialOrd, Ord)]
pub enum ObjectType {
    /// A stack object, i.e. the stack frame of a function.
    Stack,
    /// A memory object located on the heap.
    Heap,
}
/// An object is either alive or dangling (because the memory was freed or a function return invalidated the stack frame).
#[derive(Serialize, Deserialize, Debug, PartialEq, Eq, Hash, Clone, Copy, PartialOrd, Ord)]
pub enum ObjectState {
    /// The object is alive.
    Alive,
    /// The object is dangling, i.e. the memory has been freed already.
    Dangling,
    /// The state of the object is unknown (due to merging different object states).
    Unknown,
    /// The object was referenced in an "use-after-free" or "double-free" CWE-warning.
    /// This state is meant to be temporary to prevent obvious subsequent CWE-warnings with the same root cause.
    Flagged,
}
impl From<Inner> for AbstractObject {
    /// Wrap the concrete object data in an `Arc` to obtain a cheaply clonable
    /// `AbstractObject`.
    ///
    /// Implementing `From` (instead of a hand-written `Into`, which required an
    /// `#[allow(clippy::from_over_into)]`) is the idiomatic direction; the
    /// standard blanket impl still provides `Into<AbstractObject> for Inner`,
    /// so existing `inner.into()` call sites keep working.
    fn from(inner: Inner) -> AbstractObject {
        AbstractObject {
            inner: Arc::new(inner),
        }
    }
}
impl
AbstractObject
{
/// Create a new abstract object with given object type and address bytesize.
pub
fn
new
(
type_
:
ObjectType
,
address_bytesize
:
ByteSize
)
->
AbstractObject
{
let
inner
=
Inner
{
pointer_targets
:
BTreeSet
::
new
(),
is_unique
:
true
,
state
:
ObjectState
::
Alive
,
type_
:
Some
(
type_
),
memory
:
MemRegion
::
new
(
address_bytesize
),
lower_index_bound
:
BitvectorDomain
::
Top
(
address_bytesize
),
upper_index_bound
:
BitvectorDomain
::
Top
(
address_bytesize
),
};
inner
.into
()
}
/// Returns `false` if the abstract object may represent more than one object,
/// e.g. for arrays of objects.
pub
fn
is_unique
(
&
self
)
->
bool
{
self
.inner.is_unique
}
/// Mark the abstract object as possibly representing more than one actual memory object.
pub
fn
mark_as_not_unique
(
&
mut
self
)
{
let
inner
=
Arc
::
make_mut
(
&
mut
self
.inner
);
inner
.is_unique
=
false
;
}
/// Set the lower index bound that is still considered to be contained in the abstract object.
pub
fn
set_lower_index_bound
(
&
mut
self
,
lower_bound
:
BitvectorDomain
)
{
let
inner
=
Arc
::
make_mut
(
&
mut
self
.inner
);
inner
.lower_index_bound
=
lower_bound
;
}
/// Set the upper index bound that is still considered to be contained in the abstract object.
pub
fn
set_upper_index_bound
(
&
mut
self
,
upper_bound
:
BitvectorDomain
)
{
let
inner
=
Arc
::
make_mut
(
&
mut
self
.inner
);
inner
.upper_index_bound
=
upper_bound
;
}
/// Get the state of the memory object.
pub
fn
get_state
(
&
self
)
->
ObjectState
{
self
.inner.state
}
/// If `self.is_unique()==true`, set the state of the object. Else merge the new state with the old.
pub
fn
set_state
(
&
mut
self
,
new_state
:
ObjectState
)
{
let
inner
=
Arc
::
make_mut
(
&
mut
self
.inner
);
if
inner
.is_unique
{
inner
.state
=
new_state
;
}
else
{
inner
.state
=
inner
.state
.merge
(
new_state
);
}
}
/// Get the type of the memory object.
pub
fn
get_object_type
(
&
self
)
->
Option
<
ObjectType
>
{
self
.inner.type_
}
/// Mark the memory object as freed.
/// Returns an error if a possible double free is detected
/// or the memory object may not be a heap object.
pub fn mark_as_freed(&mut self) -> Result<(), Error> {
    // Only heap objects can legitimately be freed; flag anything else.
    if self.inner.type_ != Some(ObjectType::Heap) {
        self.set_state(ObjectState::Flagged);
        return Err(anyhow!("Free operation on possibly non-heap memory object"));
    }
    let inner = Arc::make_mut(&mut self.inner);
    // The resulting state depends on whether the object is unique and its current state.
    // NOTE: arm order matters — the `(true, _)` arm below only catches the
    // `Dangling`/`Unknown` cases not matched by the first arm.
    match (inner.is_unique, inner.state) {
        (true, ObjectState::Alive) | (true, ObjectState::Flagged) => {
            // A unique, live object becomes definitely dangling after the free.
            inner.state = ObjectState::Dangling;
            Ok(())
        }
        (false, ObjectState::Flagged) => {
            // Non-unique object: only some represented objects may be freed.
            inner.state = ObjectState::Unknown;
            Ok(())
        }
        (true, _) | (false, ObjectState::Dangling) => {
            // Freeing an already dangling object: possible double free.
            inner.state = ObjectState::Flagged;
            Err(anyhow!("Object may already have been freed"))
        }
        (false, _) => {
            inner.state = ObjectState::Unknown;
            Ok(())
        }
    }
}
/// Mark the memory object as possibly (but not definitely) freed.
/// Returns an error if the object was definitely freed before
/// or if the object may not be a heap object.
pub fn mark_as_maybe_freed(&mut self) -> Result<(), Error> {
    // Only heap objects can legitimately be freed; flag anything else.
    if self.inner.type_ != Some(ObjectType::Heap) {
        self.set_state(ObjectState::Flagged);
        return Err(anyhow!("Free operation on possibly non-heap memory object"));
    }
    let inner = Arc::make_mut(&mut self.inner);
    match inner.state {
        ObjectState::Dangling => {
            // The object was already definitely freed: possible double free.
            inner.state = ObjectState::Flagged;
            Err(anyhow!("Object may already have been freed"))
        }
        _ => {
            // After a *possible* free the state is no longer known.
            inner.state = ObjectState::Unknown;
            Ok(())
        }
    }
}
}
impl AbstractDomain for AbstractObject {
    /// Merge two abstract objects
    fn merge(&self, other: &Self) -> Self {
        // Cheap path: equal objects share the same Arc after clone.
        if self == other {
            self.clone()
        } else {
            Inner {
                // A pointer possibly stored in either object may be stored in the merged object.
                pointer_targets: self
                    .inner
                    .pointer_targets
                    .union(&other.inner.pointer_targets)
                    .cloned()
                    .collect(),
                // The merged object is only unique if both sources are.
                is_unique: self.inner.is_unique && other.inner.is_unique,
                state: self.inner.state.merge(other.inner.state),
                // Differing types merge to `None` (unknown type).
                type_: same_or_none(&self.inner.type_, &other.inner.type_),
                memory: self.inner.memory.merge(&other.inner.memory),
                lower_index_bound: self
                    .inner
                    .lower_index_bound
                    .merge(&other.inner.lower_index_bound),
                upper_index_bound: self
                    .inner
                    .upper_index_bound
                    .merge(&other.inner.upper_index_bound),
            }
            .into()
        }
    }

    /// The domain has no *Top* element, thus this function always returns false.
    fn is_top(&self) -> bool {
        false
    }
}
impl AbstractObject {
    /// Get a more compact json-representation of the abstract object.
    /// Intended for pretty printing, not useable for serialization/deserialization.
    pub fn to_json_compact(&self) -> serde_json::Value {
        // Scalar fields are rendered as display/debug strings.
        let mut elements = vec![
            (
                "is_unique".to_string(),
                serde_json::Value::String(format!("{}", self.inner.is_unique)),
            ),
            (
                "state".to_string(),
                serde_json::Value::String(format!("{:?}", self.inner.state)),
            ),
            (
                "type".to_string(),
                serde_json::Value::String(format!("{:?}", self.inner.type_)),
            ),
            (
                "lower_index_bound".to_string(),
                serde_json::Value::String(format!("{}", self.inner.lower_index_bound)),
            ),
            (
                "upper_index_bound".to_string(),
                serde_json::Value::String(format!("{}", self.inner.upper_index_bound)),
            ),
        ];
        // The memory region is rendered as an offset -> value map.
        let memory = self
            .inner
            .memory
            .iter()
            .map(|(index, value)| (format!("{}", index), value.to_json_compact()));
        elements.push((
            "memory".to_string(),
            serde_json::Value::Object(memory.collect()),
        ));
        serde_json::Value::Object(elements.into_iter().collect())
    }
}
/// Helper function for merging two `Option<T>` values (merging to `None` if they are not equal).
fn same_or_none<T: Eq + Clone>(left: &Option<T>, right: &Option<T>) -> Option<T> {
    // If either side is `None`, the `?` operator short-circuits to `None`.
    let (left_val, right_val) = (left.as_ref()?, right.as_ref()?);
    if left_val == right_val {
        Some(left_val.clone())
    } else {
        None
    }
}
impl ObjectState {
    /// Merge two object states.
    /// If one of the two states is `Flagged`, then the resulting state is the other object state.
    pub fn merge(self, other: Self) -> Self {
        use ObjectState::*;
        // NOTE: arm order matters. The `Flagged` arm comes first, so
        // `Flagged.merge(Unknown)` yields `Unknown` and `Flagged.merge(Flagged)`
        // yields `Flagged` (via `state = Flagged`).
        match (self, other) {
            (Flagged, state) | (state, Flagged) => state,
            (Unknown, _) | (_, Unknown) => Unknown,
            (Alive, Alive) => Alive,
            (Dangling, Dangling) => Dangling,
            // Conflicting definite states merge to `Unknown`.
            (Alive, Dangling) | (Dangling, Alive) => Unknown,
        }
    }
}
#[cfg(test)]
mod
tests
;
src/cwe_checker_lib/src/analysis/pointer_inference/object/tests.rs
0 → 100644
View file @
05843314
use
super
::
*
;
/// Test fixture: a fresh, unique, alive heap object with 8-byte addresses
/// and valid index range [0, 99].
fn new_abstract_object() -> AbstractObject {
    let inner = Inner {
        pointer_targets: BTreeSet::new(),
        is_unique: true,
        state: ObjectState::Alive,
        type_: Some(ObjectType::Heap),
        memory: MemRegion::new(ByteSize::new(8)),
        lower_index_bound: Bitvector::from_u64(0).into(),
        upper_index_bound: Bitvector::from_u64(99).into(),
    };
    inner.into()
}
/// Test helper: wrap a concrete number into a `Data` value.
fn new_data(number: i64) -> Data {
    bv(number).into()
}
/// Test helper: build a `ValueDomain` from a concrete signed 64-bit number.
fn bv(number: i64) -> ValueDomain {
    // Conversion via `Into`; the annotated return type picks the target domain.
    Bitvector::from_i64(number).into()
}
/// Test helper: build an abstract identifier for an 8-byte register
/// at the given TID (e.g. a point in time/program location).
fn new_id(tid: &str, reg_name: &str) -> AbstractIdentifier {
    AbstractIdentifier::new(
        Tid::new(tid),
        AbstractLocation::Register(reg_name.into(), ByteSize::new(8)),
    )
}
#[test]
fn
abstract_object
()
{
let
mut
object
=
new_abstract_object
();
let
three
=
new_data
(
3
);
let
offset
=
bv
(
-
15
);
object
.set_value
(
three
,
&
offset
)
.unwrap
();
assert_eq!
(
object
.get_value
(
Bitvector
::
from_i64
(
-
16
),
ByteSize
::
new
(
8
)),
Data
::
new_top
(
ByteSize
::
new
(
8
))
);
assert_eq!
(
object
.get_value
(
Bitvector
::
from_i64
(
-
15
),
ByteSize
::
new
(
8
)),
new_data
(
3
)
);
object
.set_value
(
new_data
(
4
),
&
bv
(
-
12
))
.unwrap
();
assert_eq!
(
object
.get_value
(
Bitvector
::
from_i64
(
-
15
),
ByteSize
::
new
(
8
)),
Data
::
new_top
(
ByteSize
::
new
(
8
))
);
object
.merge_value
(
new_data
(
23
),
&
bv
(
-
12
));
assert_eq!
(
object
.get_value
(
Bitvector
::
from_i64
(
-
12
),
ByteSize
::
new
(
8
)),
IntervalDomain
::
mock
(
4
,
23
)
.with_stride
(
19
)
.into
()
);
let
mut
other_object
=
new_abstract_object
();
object
.set_value
(
new_data
(
0
),
&
bv
(
0
))
.unwrap
();
other_object
.set_value
(
new_data
(
0
),
&
bv
(
0
))
.unwrap
();
let
merged_object
=
object
.merge
(
&
other_object
);
assert_eq!
(
merged_object
.get_value
(
Bitvector
::
from_i64
(
-
12
),
ByteSize
::
new
(
8
))
.get_absolute_value
(),
Some
(
&
IntervalDomain
::
mock
(
4
,
23
)
.with_stride
(
19
)
.into
())
);
assert
!
(
merged_object
.get_value
(
Bitvector
::
from_i64
(
-
12
),
ByteSize
::
new
(
8
))
.contains_top
());
assert_eq!
(
merged_object
.get_value
(
Bitvector
::
from_i64
(
0
),
ByteSize
::
new
(
8
)),
new_data
(
0
)
);
}
#[test]
fn
replace_id
()
{
use
std
::
collections
::
BTreeMap
;
let
mut
object
=
new_abstract_object
();
let
mut
target_map
=
BTreeMap
::
new
();
target_map
.insert
(
new_id
(
"time_1"
,
"RAX"
),
bv
(
20
));
target_map
.insert
(
new_id
(
"time_234"
,
"RAX"
),
bv
(
30
));
target_map
.insert
(
new_id
(
"time_1"
,
"RBX"
),
bv
(
40
));
let
pointer
=
DataDomain
::
mock_from_target_map
(
target_map
.clone
());
object
.set_value
(
pointer
,
&
bv
(
-
15
))
.unwrap
();
assert_eq!
(
object
.get_referenced_ids_overapproximation
()
.len
(),
3
);
object
.replace_abstract_id
(
&
new_id
(
"time_1"
,
"RAX"
),
&
new_id
(
"time_234"
,
"RAX"
),
&
bv
(
10
),
);
target_map
.remove
(
&
new_id
(
"time_1"
,
"RAX"
));
let
modified_pointer
=
DataDomain
::
mock_from_target_map
(
target_map
);
assert_eq!
(
object
.get_value
(
Bitvector
::
from_i64
(
-
15
),
ByteSize
::
new
(
8
)),
modified_pointer
);
object
.replace_abstract_id
(
&
new_id
(
"time_1"
,
"RBX"
),
&
new_id
(
"time_234"
,
"RBX"
),
&
bv
(
10
),
);
let
mut
target_map
=
BTreeMap
::
new
();
target_map
.insert
(
new_id
(
"time_234"
,
"RAX"
),
bv
(
30
));
target_map
.insert
(
new_id
(
"time_234"
,
"RBX"
),
bv
(
50
));
let
modified_pointer
=
DataDomain
::
mock_from_target_map
(
target_map
);
assert_eq!
(
object
.get_value
(
Bitvector
::
from_i64
(
-
15
),
ByteSize
::
new
(
8
)),
modified_pointer
);
}
#[test]
fn
remove_ids
()
{
use
std
::
collections
::
BTreeMap
;
let
mut
object
=
new_abstract_object
();
let
mut
target_map
=
BTreeMap
::
new
();
target_map
.insert
(
new_id
(
"time_1"
,
"RAX"
),
bv
(
20
));
target_map
.insert
(
new_id
(
"time_234"
,
"RAX"
),
bv
(
30
));
target_map
.insert
(
new_id
(
"time_1"
,
"RBX"
),
bv
(
40
));
let
pointer
=
DataDomain
::
mock_from_target_map
(
target_map
.clone
());
object
.set_value
(
pointer
,
&
bv
(
-
15
))
.unwrap
();
assert_eq!
(
object
.get_referenced_ids_overapproximation
()
.len
(),
3
);
let
ids_to_remove
=
vec!
[
new_id
(
"time_1"
,
"RAX"
),
new_id
(
"time_23"
,
"RBX"
)]
.into_iter
()
.collect
();
object
.remove_ids
(
&
ids_to_remove
);
assert_eq!
(
object
.get_referenced_ids_overapproximation
(),
&
vec!
[
new_id
(
"time_234"
,
"RAX"
),
new_id
(
"time_1"
,
"RBX"
)]
.into_iter
()
.collect
()
);
}
#[test]
fn
access_contained_in_bounds
()
{
let
object
=
new_abstract_object
();
assert
!
(
object
.access_contained_in_bounds
(
&
IntervalDomain
::
mock
(
0
,
99
),
ByteSize
::
new
(
1
)));
assert
!
(
!
object
.access_contained_in_bounds
(
&
IntervalDomain
::
mock
(
-
1
,
-
1
),
ByteSize
::
new
(
8
)));
assert
!
(
object
.access_contained_in_bounds
(
&
IntervalDomain
::
mock
(
92
,
92
),
ByteSize
::
new
(
8
)));
assert
!
(
!
object
.access_contained_in_bounds
(
&
IntervalDomain
::
mock
(
93
,
93
),
ByteSize
::
new
(
8
)));
}
src/cwe_checker_lib/src/analysis/pointer_inference/object/value_access.rs
0 → 100644
View file @
05843314
use
super
::
*
;
impl AbstractObject {
    /// Check whether a memory access to the abstract object at the given offset
    /// and with the given size of the accessed value is contained in the bounds of the memory object.
    /// If `offset` contains more than one possible index value,
    /// then only return `true` if the access is contained in the abstract object for all possible offset values.
    pub fn access_contained_in_bounds(&self, offset: &ValueDomain, size: ByteSize) -> bool {
        if let Ok(offset_interval) = offset.try_to_interval() {
            // Lower bound: the smallest possible offset must not be below it (signed compare).
            if let Ok(lower_bound) = self.inner.lower_index_bound.try_to_bitvec() {
                if lower_bound.checked_sgt(&offset_interval.start).unwrap() {
                    return false;
                }
            }
            if let Ok(upper_bound) = self.inner.upper_index_bound.try_to_bitvec() {
                // Normalize the access size to the bit width of the offset
                // so the arithmetic below operates on equal widths.
                let mut size_as_bitvec = Bitvector::from_u64(u64::from(size));
                match offset.bytesize().cmp(&size_as_bitvec.bytesize()) {
                    std::cmp::Ordering::Less => {
                        size_as_bitvec.truncate(offset.bytesize()).unwrap()
                    }
                    std::cmp::Ordering::Greater => {
                        size_as_bitvec.sign_extend(offset.bytesize()).unwrap()
                    }
                    std::cmp::Ordering::Equal => (),
                }
                // Last byte touched by the access: end + size - 1 (overflow-checked).
                let max_index = if let Some(val) = offset_interval
                    .end
                    .signed_add_overflow_checked(&size_as_bitvec)
                {
                    val - &Bitvector::one(offset.bytesize().into())
                } else {
                    return false; // The max index already causes an integer overflow
                };
                if upper_bound.checked_slt(&max_index).unwrap() {
                    return false;
                }
            }
            // A missing (i.e. `Top`) bound imposes no restriction in that direction.
            true
        } else {
            // Offsets that cannot be represented as an interval are treated as out of bounds.
            false
        }
    }

    /// Read the value at the given offset of the given size inside the memory region.
    pub fn get_value(&self, offset: Bitvector, bytesize: ByteSize) -> Data {
        self.inner.memory.get(offset, bytesize)
    }

    /// Write a value at the given offset to the memory region.
    ///
    /// If the abstract object is not unique (i.e. may represent more than one actual object),
    /// merge the old value at the given offset with the new value.
    pub fn set_value(&mut self, value: Data, offset: &ValueDomain) -> Result<(), Error> {
        let inner = Arc::make_mut(&mut self.inner);
        // Any ID referenced by the written value may now be stored in this object.
        inner
            .pointer_targets
            .extend(value.referenced_ids().cloned());
        if let Ok(concrete_offset) = offset.try_to_bitvec() {
            if inner.is_unique {
                // Unique object and exact offset: a strong update is sound.
                inner.memory.add(value, concrete_offset);
            } else {
                // Non-unique object: only a weak (merging) update is sound.
                let merged_value = inner
                    .memory
                    .get(concrete_offset.clone(), value.bytesize())
                    .merge(&value);
                inner.memory.add(merged_value, concrete_offset);
            };
        } else if let Ok((start, end)) = offset.try_to_offset_interval() {
            // Imprecise offset: invalidate every cell the write could hit.
            inner
                .memory
                .mark_interval_values_as_top(start, end, value.bytesize());
        } else {
            // Completely unknown offset: invalidate the whole memory region.
            inner.memory.mark_all_values_as_top();
        }
        // NOTE(review): this function currently never returns an error.
        Ok(())
    }

    /// Merge `value` at position `offset` with the value currently saved at that position.
    pub fn merge_value(&mut self, value: Data, offset: &ValueDomain) {
        let inner = Arc::make_mut(&mut self.inner);
        inner
            .pointer_targets
            .extend(value.referenced_ids().cloned());
        if let Ok(concrete_offset) = offset.try_to_bitvec() {
            // Always a weak update: old and new value are merged.
            let merged_value = inner
                .memory
                .get(concrete_offset.clone(), value.bytesize())
                .merge(&value);
            inner.memory.add(merged_value, concrete_offset);
        } else if let Ok((start, end)) = offset.try_to_offset_interval() {
            inner
                .memory
                .mark_interval_values_as_top(start, end, value.bytesize());
        } else {
            inner.memory.mark_all_values_as_top();
        }
    }

    /// Marks all memory as `Top` and adds the `additional_targets` to the pointer targets.
    /// Represents the effect of unknown write instructions to the object
    /// which may include writing pointers to targets from the `additional_targets` set to the object.
    pub fn assume_arbitrary_writes(&mut self, additional_targets: &BTreeSet<AbstractIdentifier>) {
        let inner = Arc::make_mut(&mut self.inner);
        inner.memory.mark_all_values_as_top();
        inner
            .pointer_targets
            .extend(additional_targets.iter().cloned());
    }
}
src/cwe_checker_lib/src/analysis/pointer_inference/object_list.rs
deleted
100644 → 0
View file @
323d070e
use
super
::
object
::
*
;
use
super
::{
Data
,
ValueDomain
};
use
crate
::
prelude
::
*
;
use
crate
::{
abstract_domain
::
*
,
utils
::
binary
::
RuntimeMemoryImage
};
use
serde
::{
Deserialize
,
Serialize
};
use
std
::
collections
::{
BTreeMap
,
BTreeSet
};
/// The list of all known abstract objects.
///
/// Each abstract object is unique in the sense that there is exactly one abstract identifier pointing to it.
/// However, an abstract object itself can be marked as non-unique
/// to indicate that it may represent more than one actual memory object.
#[derive(Serialize, Deserialize, Debug, PartialEq, Eq, Clone)]
pub struct AbstractObjectList {
    /// The abstract objects.
    ///
    /// Each abstract object comes with an offset given as a [`ValueDomain`].
    /// This offset determines where the zero offset corresponding to the abstract identifier inside the object is.
    /// Note that this offset may be a `Top` element
    /// if the exact offset corresponding to the identifier is unknown.
    objects: BTreeMap<AbstractIdentifier, (AbstractObject, ValueDomain)>,
}
impl
AbstractObjectList
{
/// Create a new abstract object list with just one abstract object corresponding to the stack.
///
/// The offset into the stack object and the `upper_index_bound` of the stack object will be both set to zero.
/// This corresponds to the generic stack state at the start of a function.
pub
fn
from_stack_id
(
stack_id
:
AbstractIdentifier
,
address_bytesize
:
ByteSize
,
)
->
AbstractObjectList
{
let
mut
objects
=
BTreeMap
::
new
();
let
mut
stack_object
=
AbstractObject
::
new
(
ObjectType
::
Stack
,
address_bytesize
);
stack_object
.set_upper_index_bound
(
Bitvector
::
zero
(
address_bytesize
.into
())
.into
());
objects
.insert
(
stack_id
,
(
stack_object
,
Bitvector
::
zero
(
apint
::
BitWidth
::
from
(
address_bytesize
))
.into
(),
),
);
AbstractObjectList
{
objects
}
}
/// Check the state of a memory object at a given address.
/// Returns `true` if at least one of the targets of the pointer is dangling.
/// If `report_unknown_states` is `true`,
/// then objects with unknown states get reported if they are unique.
/// I.e. objects representing more than one actual object (e.g. an array of object) will not get reported,
/// even if their state is unknown and `report_unknown_states` is `true`.
pub
fn
is_dangling_pointer
(
&
self
,
address
:
&
Data
,
report_unknown_states
:
bool
)
->
bool
{
for
id
in
address
.referenced_ids
()
{
if
let
Some
((
object
,
_offset_id
))
=
self
.objects
.get
(
id
)
{
match
(
report_unknown_states
,
object
.get_state
())
{
(
_
,
ObjectState
::
Dangling
)
=>
return
true
,
(
true
,
ObjectState
::
Unknown
)
=>
{
if
object
.is_unique
{
return
true
;
}
}
_
=>
(),
}
}
}
// No dangling pointer found
false
}
/// Mark all memory objects targeted by the given `address` pointer,
/// whose state is either dangling or unknown,
/// as flagged.
pub
fn
mark_dangling_pointer_targets_as_flagged
(
&
mut
self
,
address
:
&
Data
)
{
for
id
in
address
.referenced_ids
()
{
let
(
object
,
_
)
=
self
.objects
.get_mut
(
id
)
.unwrap
();
if
matches!
(
object
.get_state
(),
ObjectState
::
Unknown
|
ObjectState
::
Dangling
)
{
object
.set_state
(
ObjectState
::
Flagged
);
}
}
}
/// Check whether a memory access at the given address (and accessing `size` many bytes)
/// may be an out-of-bounds memory access.
///
/// Note that `Top` values as addresses are not marked as out-of-bounds,
/// since they are more likely due to analysis imprecision than to actual out-of-bounds access.
pub
fn
is_out_of_bounds_mem_access
(
&
self
,
address
:
&
Data
,
size
:
ByteSize
,
global_data
:
&
RuntimeMemoryImage
,
)
->
bool
{
if
let
Some
(
value
)
=
address
.get_absolute_value
()
{
if
let
Ok
((
start
,
end
))
=
value
.try_to_offset_interval
()
{
if
start
<
0
||
end
<
start
{
return
true
;
}
if
global_data
.is_interval_readable
(
start
as
u64
,
end
as
u64
+
u64
::
from
(
size
)
-
1
)
.is_err
()
{
return
true
;
}
}
}
for
(
id
,
offset
)
in
address
.get_relative_values
()
{
if
let
Some
((
object
,
base_offset
))
=
self
.objects
.get
(
id
)
{
let
adjusted_offset
=
offset
.clone
()
+
base_offset
.clone
();
if
!
adjusted_offset
.is_top
()
&&
!
object
.access_contained_in_bounds
(
&
adjusted_offset
,
size
)
{
return
true
;
}
}
}
false
}
/// Set the lower index bound for indices to be considered inside the memory object.
/// The bound is inclusive, i.e. the bound index itself is also considered to be inside the memory object.
///
/// Any `bound` value other than a constant bitvector is interpreted as the memory object not having a lower bound.
pub
fn
set_lower_index_bound
(
&
mut
self
,
object_id
:
&
AbstractIdentifier
,
bound
:
&
ValueDomain
)
{
let
(
object
,
base_offset
)
=
self
.objects
.get_mut
(
object_id
)
.unwrap
();
let
bound
=
(
bound
.clone
()
+
base_offset
.clone
())
.try_to_bitvec
()
.map
(|
bitvec
|
bitvec
.into
())
.unwrap_or_else
(|
_
|
BitvectorDomain
::
new_top
(
bound
.bytesize
()));
object
.set_lower_index_bound
(
bound
);
}
/// Set the upper index bound for indices to be considered inside the memory object.
/// The bound is inclusive, i.e. the bound index itself is also considered to be inside the memory object.
///
/// Any `bound` value other than a constant bitvector is interpreted as the memory object not having an upper bound.
pub
fn
set_upper_index_bound
(
&
mut
self
,
object_id
:
&
AbstractIdentifier
,
bound
:
&
ValueDomain
)
{
let
(
object
,
base_offset
)
=
self
.objects
.get_mut
(
object_id
)
.unwrap
();
let
bound
=
(
bound
.clone
()
+
base_offset
.clone
())
.try_to_bitvec
()
.map
(|
bitvec
|
bitvec
.into
())
.unwrap_or_else
(|
_
|
BitvectorDomain
::
new_top
(
bound
.bytesize
()));
object
.set_upper_index_bound
(
bound
);
}
/// Get the value at a given address.
/// If the address is not unique, merge the value of all possible addresses.
///
/// This function only checks for relative targets and not for absolute addresses.
/// If the address does not contain any relative targets an empty value is returned.
pub
fn
get_value
(
&
self
,
address
:
&
Data
,
size
:
ByteSize
)
->
Data
{
let
mut
merged_value
=
Data
::
new_empty
(
size
);
for
(
id
,
offset_pointer
)
in
address
.get_relative_values
()
{
if
let
Some
((
object
,
offset_identifier
))
=
self
.objects
.get
(
id
)
{
let
offset
=
offset_pointer
.clone
()
+
offset_identifier
.clone
();
if
let
Ok
(
concrete_offset
)
=
offset
.try_to_bitvec
()
{
let
value
=
object
.get_value
(
concrete_offset
,
size
);
merged_value
=
merged_value
.merge
(
&
value
);
}
else
{
merged_value
.set_contains_top_flag
();
}
}
else
{
merged_value
.set_contains_top_flag
();
}
}
if
address
.contains_top
()
{
merged_value
.set_contains_top_flag
();
}
merged_value
}
/// Set the value at a given address.
///
/// If the address has more than one target,
/// we merge-write the value to all targets.
pub
fn
set_value
(
&
mut
self
,
pointer
:
Data
,
value
:
Data
)
->
Result
<
(),
Error
>
{
let
targets
=
pointer
.get_relative_values
();
match
targets
.len
()
{
0
=>
Ok
(()),
1
=>
{
let
(
id
,
pointer_offset
)
=
targets
.iter
()
.next
()
.unwrap
();
let
(
object
,
id_offset
)
=
self
.objects
.get_mut
(
id
)
.unwrap
();
let
adjusted_offset
=
pointer_offset
.clone
()
+
id_offset
.clone
();
object
.set_value
(
value
,
&
adjusted_offset
)
}
_
=>
{
// There is more than one object that the pointer may write to.
// We merge-write to all possible targets
for
(
id
,
offset
)
in
targets
{
let
(
object
,
object_offset
)
=
self
.objects
.get_mut
(
id
)
.unwrap
();
let
adjusted_offset
=
offset
.clone
()
+
object_offset
.clone
();
object
.merge_value
(
value
.clone
(),
&
adjusted_offset
);
}
Ok
(())
}
}
}
/// Replace one abstract identifier with another one. Adjust offsets of all pointers accordingly.
///
/// **Example:**
/// Assume the `old_id` points to offset 0 in the corresponding memory object and the `new_id` points to offset -32.
/// Then the offset_adjustment is -32.
/// The offset_adjustment gets *added* to the base offset in `self.memory.ids` (so that it points to offset -32 in the memory object),
/// while it gets *subtracted* from all pointer values (so that they still point to the same spot in the corresponding memory object).
pub
fn
replace_abstract_id
(
&
mut
self
,
old_id
:
&
AbstractIdentifier
,
new_id
:
&
AbstractIdentifier
,
offset_adjustment
:
&
ValueDomain
,
)
{
let
negative_offset
=
-
offset_adjustment
.clone
();
for
(
object
,
_
)
in
self
.objects
.values_mut
()
{
object
.replace_abstract_id
(
old_id
,
new_id
,
&
negative_offset
);
}
if
let
Some
((
object
,
old_offset
))
=
self
.objects
.remove
(
old_id
)
{
let
new_offset
=
old_offset
+
offset_adjustment
.clone
();
self
.objects
.insert
(
new_id
.clone
(),
(
object
,
new_offset
));
}
}
/// Remove the memory object that `object_id` points to from the object list.
pub
fn
remove_object
(
&
mut
self
,
object_id
:
&
AbstractIdentifier
)
{
self
.objects
.remove
(
object_id
);
}
/// Add a new abstract object to the object list
///
/// If an object with the same ID already exists,
/// the object is marked as non-unique and merged with the newly created object.
pub
fn
add_abstract_object
(
&
mut
self
,
object_id
:
AbstractIdentifier
,
initial_offset
:
ValueDomain
,
type_
:
ObjectType
,
address_bytesize
:
ByteSize
,
)
{
let
new_object
=
AbstractObject
::
new
(
type_
,
address_bytesize
);
if
let
Some
((
object
,
offset
))
=
self
.objects
.get_mut
(
&
object_id
)
{
// If the identifier already exists, we have to assume that more than one object may be referenced by this identifier.
object
.is_unique
=
false
;
*
object
=
object
.merge
(
&
new_object
);
*
offset
=
offset
.merge
(
&
initial_offset
);
}
else
{
self
.objects
.insert
(
object_id
,
(
new_object
,
initial_offset
));
}
}
/// Return all IDs that may be referenced by the memory object pointed to by the given ID.
/// The returned set is an overapproximation of the actual referenced IDs.
pub
fn
get_referenced_ids_overapproximation
(
&
self
,
id
:
&
AbstractIdentifier
,
)
->
BTreeSet
<
AbstractIdentifier
>
{
if
let
Some
((
object
,
_offset
))
=
self
.objects
.get
(
id
)
{
object
.get_referenced_ids_overapproximation
()
.clone
()
}
else
{
BTreeSet
::
new
()
}
}
/// Return all IDs that get referenced by the memory object pointed to by the given ID.
/// The returned set is an underapproximation of the actual referenced IDs,
/// since only still tracked pointers inside the memory object are used to compute it.
pub
fn
get_referenced_ids_underapproximation
(
&
self
,
id
:
&
AbstractIdentifier
,
)
->
BTreeSet
<
AbstractIdentifier
>
{
if
let
Some
((
object
,
_offset
))
=
self
.objects
.get
(
id
)
{
object
.get_referenced_ids_underapproximation
()
}
else
{
panic!
(
"Abstract ID not associated to an object"
)
}
}
/// For abstract IDs not contained in the provided set of IDs
/// remove the corresponding abstract objects.
///
/// This function does not remove any pointer targets in the contained abstract objects.
pub
fn
remove_unused_objects
(
&
mut
self
,
ids_to_keep
:
&
BTreeSet
<
AbstractIdentifier
>
)
{
let
all_ids
:
BTreeSet
<
AbstractIdentifier
>
=
self
.objects
.keys
()
.cloned
()
.collect
();
let
ids_to_remove
=
all_ids
.difference
(
ids_to_keep
);
for
id
in
ids_to_remove
{
self
.objects
.remove
(
id
);
}
}
/// Get all object IDs.
pub
fn
get_all_object_ids
(
&
self
)
->
BTreeSet
<
AbstractIdentifier
>
{
self
.objects
.keys
()
.cloned
()
.collect
()
}
/// Mark a memory object as already freed (i.e. pointers to it are dangling).
///
/// If the object cannot be identified uniquely, all possible targets are marked as having an unknown status.
/// Returns either a non-empty list of detected errors (like possible double frees) or `OK(())` if no errors were found.
pub
fn
mark_mem_object_as_freed
(
&
mut
self
,
object_pointer
:
&
Data
,
)
->
Result
<
(),
Vec
<
(
AbstractIdentifier
,
Error
)
>>
{
let
ids
:
Vec
<
AbstractIdentifier
>
=
object_pointer
.referenced_ids
()
.cloned
()
.collect
();
let
mut
possible_double_free_ids
=
Vec
::
new
();
if
ids
.len
()
>
1
{
for
id
in
ids
{
if
let
Err
(
error
)
=
self
.objects
.get_mut
(
&
id
)
.unwrap
()
.
0
.mark_as_maybe_freed
()
{
possible_double_free_ids
.push
((
id
.clone
(),
error
));
}
}
}
else
if
let
Some
(
id
)
=
ids
.get
(
0
)
{
if
let
Err
(
error
)
=
self
.objects
.get_mut
(
id
)
.unwrap
()
.
0
.mark_as_freed
()
{
possible_double_free_ids
.push
((
id
.clone
(),
error
));
}
}
if
possible_double_free_ids
.is_empty
()
{
Ok
(())
}
else
{
Err
(
possible_double_free_ids
)
}
}
/// Assume that arbitrary writes happened to a memory object,
/// including adding pointers to targets contained in `new_possible_reference_targets` to it.
///
/// This is used as a coarse approximation for function calls whose effect is unknown.
/// Note that this may still underestimate the effect of a function call:
/// We do not assume that the state of the object changes (i.e. no memory freed), which may not be true.
/// We assume that pointers to the object are *not* given to other threads or the operating system,
/// which could result in arbitrary writes to the object even after the function call returned.
pub
fn
assume_arbitrary_writes_to_object
(
&
mut
self
,
object_id
:
&
AbstractIdentifier
,
new_possible_reference_targets
:
&
BTreeSet
<
AbstractIdentifier
>
,
)
{
if
let
Some
((
object
,
_
))
=
self
.objects
.get_mut
(
object_id
)
{
object
.assume_arbitrary_writes
(
new_possible_reference_targets
);
}
}
/// Get the number of objects that are currently tracked.
#[cfg(test)]
pub
fn
get_num_objects
(
&
self
)
->
usize
{
self
.objects
.len
()
}
/// Append those objects from another object list, whose abstract IDs are not known to self.
pub
fn
append_unknown_objects
(
&
mut
self
,
other_object_list
:
&
AbstractObjectList
)
{
for
(
id
,
(
other_object
,
other_offset
))
in
other_object_list
.objects
.iter
()
{
if
self
.objects
.get
(
id
)
==
None
{
self
.objects
.insert
(
id
.clone
(),
(
other_object
.clone
(),
other_offset
.clone
()));
}
}
}
/// Remove the provided IDs as targets from all pointers in all objects.
/// Also remove the objects, that these IDs point to.
pub
fn
remove_ids
(
&
mut
self
,
ids_to_remove
:
&
BTreeSet
<
AbstractIdentifier
>
)
{
for
id
in
ids_to_remove
{
if
self
.objects
.get
(
id
)
.is_some
()
{
self
.objects
.remove
(
id
);
}
}
for
(
object
,
_
)
in
self
.objects
.values_mut
()
{
object
.remove_ids
(
ids_to_remove
);
}
}
/// Return the object type of a memory object.
/// Returns an error if no object with the given ID is contained in the object list.
pub
fn
get_object_type
(
&
self
,
object_id
:
&
AbstractIdentifier
,
)
->
Result
<
Option
<
ObjectType
>
,
()
>
{
match
self
.objects
.get
(
object_id
)
{
Some
((
object
,
_
))
=>
Ok
(
object
.get_object_type
()),
None
=>
Err
(()),
}
}
/// Returns `true` if the object corresponding to the given ID represents an unique object
/// and `false` if it may represent more than one object (e.g. several array elements).
/// Returns an error if the ID is not contained in the object list.
pub
fn
is_unique_object
(
&
self
,
object_id
:
&
AbstractIdentifier
)
->
Result
<
bool
,
Error
>
{
match
self
.objects
.get
(
object_id
)
{
Some
((
object
,
_
))
=>
Ok
(
object
.is_unique
),
None
=>
Err
(
anyhow!
(
"Object ID not contained in object list."
)),
}
}
}
impl AbstractDomain for AbstractObjectList {
    /// Merge two abstract object lists.
    ///
    /// Right now this function is only sound if for each abstract object only one ID pointing to it exists.
    /// Violations of this will be detected and result in panics.
    /// Further investigation into the problem is needed
    /// to decide, how to correctly represent and handle cases,
    /// where more than one ID should point to the same object.
    fn merge(&self, other: &Self) -> Self {
        let mut merged_objects = self.objects.clone();
        for (id, (other_object, other_offset)) in other.objects.iter() {
            if let Some((object, offset)) = merged_objects.get_mut(id) {
                // ID known to both lists: merge objects and base offsets pointwise.
                *object = object.merge(other_object);
                *offset = offset.merge(other_offset);
            } else {
                // ID only known to `other`: copy it over unchanged.
                merged_objects.insert(id.clone(), (other_object.clone(), other_offset.clone()));
            }
        }
        AbstractObjectList {
            objects: merged_objects,
        }
    }

    /// Always returns `false`, since abstract object lists have no *Top* element.
    fn is_top(&self) -> bool {
        false
    }
}
impl AbstractObjectList {
    /// Get a more compact json-representation of the abstract object list.
    /// Intended for pretty printing, not useable for serialization/deserialization.
    pub fn to_json_compact(&self) -> serde_json::Value {
        use serde_json::*;
        let mut object_map = Map::new();
        for (id, (object, offset)) in self.objects.iter() {
            // Key carries both the abstract ID and its base offset into the object.
            object_map.insert(
                format!("{} (base offset {})", id, offset),
                object.to_json_compact(),
            );
        }
        Value::Object(object_map)
    }
}
#[cfg(test)]
mod
tests
{
use
super
::
*
;
fn
bv
(
value
:
i64
)
->
ValueDomain
{
ValueDomain
::
from
(
Bitvector
::
from_i64
(
value
))
}
fn
new_id
(
name
:
&
str
)
->
AbstractIdentifier
{
AbstractIdentifier
::
new
(
Tid
::
new
(
"time0"
),
AbstractLocation
::
Register
(
name
.into
(),
ByteSize
::
new
(
8
)),
)
}
#[test]
fn
abstract_object_list
()
{
let
mut
obj_list
=
AbstractObjectList
::
from_stack_id
(
new_id
(
"RSP"
.into
()),
ByteSize
::
new
(
8
));
assert_eq!
(
obj_list
.objects
.len
(),
1
);
assert_eq!
(
obj_list
.objects
.values
()
.next
()
.unwrap
()
.
1
,
bv
(
0
));
let
pointer
=
DataDomain
::
from_target
(
new_id
(
"RSP"
.into
()),
bv
(
8
));
obj_list
.set_value
(
pointer
.clone
(),
bv
(
42
)
.into
())
.unwrap
();
assert_eq!
(
obj_list
.get_value
(
&
pointer
,
ByteSize
::
new
(
8
)),
bv
(
42
)
.into
()
);
let
mut
other_obj_list
=
AbstractObjectList
::
from_stack_id
(
new_id
(
"RSP"
.into
()),
ByteSize
::
new
(
8
));
let
second_pointer
=
DataDomain
::
from_target
(
new_id
(
"RSP"
.into
()),
bv
(
-
8
));
other_obj_list
.set_value
(
pointer
.clone
(),
bv
(
42
)
.into
())
.unwrap
();
other_obj_list
.set_value
(
second_pointer
.clone
(),
bv
(
35
)
.into
())
.unwrap
();
assert_eq!
(
other_obj_list
.get_value
(
&
second_pointer
,
ByteSize
::
new
(
8
)),
bv
(
35
)
.into
()
);
other_obj_list
.add_abstract_object
(
new_id
(
"RAX"
.into
()),
bv
(
0
),
ObjectType
::
Heap
,
ByteSize
::
new
(
8
),
);
let
heap_pointer
=
DataDomain
::
from_target
(
new_id
(
"RAX"
.into
()),
bv
(
8
));
other_obj_list
.set_value
(
heap_pointer
.clone
(),
bv
(
3
)
.into
())
.unwrap
();
let
mut
merged
=
obj_list
.merge
(
&
other_obj_list
);
assert_eq!
(
merged
.get_value
(
&
pointer
,
ByteSize
::
new
(
8
)),
bv
(
42
)
.into
());
assert
!
(
merged
.get_value
(
&
second_pointer
,
ByteSize
::
new
(
8
))
.contains_top
());
assert_eq!
(
merged
.get_value
(
&
heap_pointer
,
ByteSize
::
new
(
8
)),
bv
(
3
)
.into
()
);
assert_eq!
(
merged
.objects
.len
(),
2
);
merged
.set_value
(
pointer
.merge
(
&
heap_pointer
),
bv
(
3
)
.into
())
.unwrap
();
assert_eq!
(
merged
.get_value
(
&
pointer
,
ByteSize
::
new
(
8
)),
IntervalDomain
::
mock
(
3
,
42
)
.with_stride
(
39
)
.into
()
);
assert_eq!
(
merged
.get_value
(
&
heap_pointer
,
ByteSize
::
new
(
8
)),
bv
(
3
)
.into
()
);
assert_eq!
(
merged
.objects
.len
(),
2
);
other_obj_list
.set_value
(
pointer
.clone
(),
heap_pointer
.clone
())
.unwrap
();
assert_eq!
(
other_obj_list
.get_referenced_ids_overapproximation
(
&
new_id
(
"RSP"
.into
()))
.len
(),
1
);
assert_eq!
(
*
other_obj_list
.get_referenced_ids_overapproximation
(
&
new_id
(
"RSP"
.into
()))
.iter
()
.next
()
.unwrap
(),
new_id
(
"RAX"
.into
())
);
let
modified_heap_pointer
=
DataDomain
::
from_target
(
new_id
(
"ID2"
.into
()),
bv
(
8
));
other_obj_list
.replace_abstract_id
(
&
new_id
(
"RAX"
.into
()),
&
new_id
(
"ID2"
.into
()),
&
bv
(
0
));
assert_eq!
(
other_obj_list
.get_value
(
&
pointer
,
ByteSize
::
new
(
8
)),
modified_heap_pointer
.clone
()
);
assert_eq!
(
other_obj_list
.objects
.get
(
&
new_id
(
"RAX"
.into
())),
None
);
assert
!
(
matches!
(
other_obj_list
.objects
.get
(
&
new_id
(
"ID2"
.into
())),
Some
(
_
)
));
let
mut
ids_to_keep
=
BTreeSet
::
new
();
ids_to_keep
.insert
(
new_id
(
"ID2"
.into
()));
other_obj_list
.remove_unused_objects
(
&
ids_to_keep
);
assert_eq!
(
other_obj_list
.objects
.len
(),
1
);
assert_eq!
(
other_obj_list
.objects
.iter
()
.next
()
.unwrap
()
.
0
,
&
new_id
(
"ID2"
.into
())
);
assert_eq!
(
other_obj_list
.objects
.values
()
.next
()
.unwrap
()
.
0
.get_state
(),
crate
::
analysis
::
pointer_inference
::
object
::
ObjectState
::
Alive
);
other_obj_list
.mark_mem_object_as_freed
(
&
modified_heap_pointer
)
.unwrap
();
assert_eq!
(
other_obj_list
.objects
.values
()
.next
()
.unwrap
()
.
0
.get_state
(),
crate
::
analysis
::
pointer_inference
::
object
::
ObjectState
::
Dangling
);
}
#[test]
fn
append_unknown_objects_test
()
{
let
mut
obj_list
=
AbstractObjectList
::
from_stack_id
(
new_id
(
"stack"
),
ByteSize
::
new
(
8
));
let
mut
other_obj_list
=
AbstractObjectList
::
from_stack_id
(
new_id
(
"stack"
),
ByteSize
::
new
(
8
));
other_obj_list
.add_abstract_object
(
new_id
(
"heap_obj"
),
bv
(
0
)
.into
(),
ObjectType
::
Heap
,
ByteSize
::
new
(
8
),
);
obj_list
.append_unknown_objects
(
&
other_obj_list
);
assert_eq!
(
obj_list
.objects
.len
(),
2
);
assert
!
(
obj_list
.objects
.get
(
&
new_id
(
"stack"
))
.is_some
());
assert
!
(
obj_list
.objects
.get
(
&
new_id
(
"heap_obj"
))
.is_some
());
}
}
src/cwe_checker_lib/src/analysis/pointer_inference/object_list/cwe_helpers.rs
0 → 100644
View file @
05843314
//! Methods of [`AbstractObjectList`] that manage memory access rules
//! or check whether they are violated.
//! E.g. checks for use-after-free or buffer overflow checks.
use
super
::
*
;
impl AbstractObjectList {
    /// Check the state of a memory object at a given address.
    /// Returns `true` if at least one of the targets of the pointer is dangling.
    /// If `report_unknown_states` is `true`,
    /// then objects with unknown states get reported if they are unique.
    /// I.e. objects representing more than one actual object (e.g. an array of object) will not get reported,
    /// even if their state is unknown and `report_unknown_states` is `true`.
    pub fn is_dangling_pointer(&self, address: &Data, report_unknown_states: bool) -> bool {
        for id in address.referenced_ids() {
            // IDs that are not tracked in the object list are silently skipped.
            if let Some((object, _offset_id)) = self.objects.get(id) {
                match (report_unknown_states, object.get_state()) {
                    // A definitely dangling target is always reported.
                    (_, ObjectState::Dangling) => return true,
                    // An unknown state only counts if requested and the object is unique.
                    (true, ObjectState::Unknown) => {
                        if object.is_unique() {
                            return true;
                        }
                    }
                    _ => (),
                }
            }
        }
        // No dangling pointer found
        false
    }

    /// Mark all memory objects targeted by the given `address` pointer,
    /// whose state is either dangling or unknown,
    /// as flagged.
    pub fn mark_dangling_pointer_targets_as_flagged(&mut self, address: &Data) {
        for id in address.referenced_ids() {
            // NOTE(review): this unwrap panics if a referenced ID has no tracked
            // object — callers presumably guarantee all targets are tracked; confirm.
            let (object, _) = self.objects.get_mut(id).unwrap();
            if matches!(
                object.get_state(),
                ObjectState::Unknown | ObjectState::Dangling
            ) {
                object.set_state(ObjectState::Flagged);
            }
        }
    }

    /// Check whether a memory access at the given address (and accessing `size` many bytes)
    /// may be an out-of-bounds memory access.
    ///
    /// Note that `Top` values as addresses are not marked as out-of-bounds,
    /// since they are more likely due to analysis imprecision than to actual out-of-bounds access.
    pub fn is_out_of_bounds_mem_access(
        &self,
        address: &Data,
        size: ByteSize,
        global_data: &RuntimeMemoryImage,
    ) -> bool {
        // Absolute address part: check against the readable global memory ranges.
        if let Some(value) = address.get_absolute_value() {
            if let Ok((start, end)) = value.try_to_offset_interval() {
                // Negative or inverted intervals can never be valid global addresses.
                if start < 0 || end < start {
                    return true;
                }
                // NOTE(review): `end as u64 + u64::from(size) - 1` could wrap for
                // addresses near u64::MAX — presumably unreachable in practice; confirm.
                if global_data
                    .is_interval_readable(start as u64, end as u64 + u64::from(size) - 1)
                    .is_err()
                {
                    return true;
                }
            }
        }
        // Relative address part: check each target object's bounds
        // at the offset adjusted by the object's base offset.
        for (id, offset) in address.get_relative_values() {
            if let Some((object, base_offset)) = self.objects.get(id) {
                let adjusted_offset = offset.clone() + base_offset.clone();
                // A `Top` offset is treated as in-bounds (imprecision, not a bug report).
                if !adjusted_offset.is_top()
                    && !object.access_contained_in_bounds(&adjusted_offset, size)
                {
                    return true;
                }
            }
        }
        false
    }

    /// Set the lower index bound for indices to be considered inside the memory object.
    /// The bound is inclusive, i.e. the bound index itself is also considered to be inside the memory object.
    ///
    /// Any `bound` value other than a constant bitvector is interpreted as the memory object not having a lower bound.
    pub fn set_lower_index_bound(&mut self, object_id: &AbstractIdentifier, bound: &ValueDomain) {
        // NOTE(review): panics if `object_id` is not in the list — callers must ensure it is.
        let (object, base_offset) = self.objects.get_mut(object_id).unwrap();
        // Translate the bound into the object's internal coordinate system;
        // non-constant bounds degrade to `Top` (i.e. "no bound").
        let bound = (bound.clone() + base_offset.clone())
            .try_to_bitvec()
            .map(|bitvec| bitvec.into())
            .unwrap_or_else(|_| BitvectorDomain::new_top(bound.bytesize()));
        object.set_lower_index_bound(bound);
    }

    /// Set the upper index bound for indices to be considered inside the memory object.
    /// The bound is inclusive, i.e. the bound index itself is also considered to be inside the memory object.
    ///
    /// Any `bound` value other than a constant bitvector is interpreted as the memory object not having an upper bound.
    pub fn set_upper_index_bound(&mut self, object_id: &AbstractIdentifier, bound: &ValueDomain) {
        // NOTE(review): panics if `object_id` is not in the list — callers must ensure it is.
        let (object, base_offset) = self.objects.get_mut(object_id).unwrap();
        // Translate the bound into the object's internal coordinate system;
        // non-constant bounds degrade to `Top` (i.e. "no bound").
        let bound = (bound.clone() + base_offset.clone())
            .try_to_bitvec()
            .map(|bitvec| bitvec.into())
            .unwrap_or_else(|_| BitvectorDomain::new_top(bound.bytesize()));
        object.set_upper_index_bound(bound);
    }

    /// Mark a memory object as already freed (i.e. pointers to it are dangling).
    ///
    /// If the object cannot be identified uniquely, all possible targets are marked as having an unknown status.
    /// Returns either a non-empty list of detected errors (like possible double frees) or `OK(())` if no errors were found.
    pub fn mark_mem_object_as_freed(
        &mut self,
        object_pointer: &Data,
    ) -> Result<(), Vec<(AbstractIdentifier, Error)>> {
        let ids: Vec<AbstractIdentifier> = object_pointer.referenced_ids().cloned().collect();
        let mut possible_double_free_ids = Vec::new();
        if ids.len() > 1 {
            // Ambiguous pointer: we only know that *some* target was freed,
            // so every target is downgraded to "maybe freed".
            for id in ids {
                if let Err(error) = self
                    .objects
                    .get_mut(&id)
                    .unwrap()
                    .0
                    .mark_as_maybe_freed()
                {
                    possible_double_free_ids.push((id.clone(), error));
                }
            }
        } else if let Some(id) = ids.get(0) {
            // Unique target: mark it as definitely freed.
            if let Err(error) = self.objects.get_mut(id).unwrap().0.mark_as_freed() {
                possible_double_free_ids.push((id.clone(), error));
            }
        }
        if possible_double_free_ids.is_empty() {
            Ok(())
        } else {
            Err(possible_double_free_ids)
        }
    }
}
src/cwe_checker_lib/src/analysis/pointer_inference/object_list/id_manipulation.rs
0 → 100644
View file @
05843314
//! Methods of [`AbstractObjectList`] related to manipulating abstract IDs.
use
super
::
*
;
impl AbstractObjectList {
    /// Replace one abstract identifier with another one. Adjust offsets of all pointers accordingly.
    ///
    /// **Example:**
    /// Assume the `old_id` points to offset 0 in the corresponding memory object and the `new_id` points to offset -32.
    /// Then the offset_adjustment is -32.
    /// The offset_adjustment gets *added* to the base offset in `self.memory.ids` (so that it points to offset -32 in the memory object),
    /// while it gets *subtracted* from all pointer values (so that they still point to the same spot in the corresponding memory object).
    pub fn replace_abstract_id(
        &mut self,
        old_id: &AbstractIdentifier,
        new_id: &AbstractIdentifier,
        offset_adjustment: &ValueDomain,
    ) {
        // Stored pointers need the inverse adjustment so that they keep
        // denoting the same concrete location inside the renamed object.
        let negative_offset = -offset_adjustment.clone();
        for (object, _) in self.objects.values_mut() {
            object.replace_abstract_id(old_id, new_id, &negative_offset);
        }
        // Re-key the object itself under the new ID with an adjusted base offset.
        if let Some((object, old_offset)) = self.objects.remove(old_id) {
            let new_offset = old_offset + offset_adjustment.clone();
            self.objects.insert(new_id.clone(), (object, new_offset));
        }
    }

    /// Return all IDs that may be referenced by the memory object pointed to by the given ID.
    /// The returned set is an overapproximation of the actual referenced IDs.
    pub fn get_referenced_ids_overapproximation(
        &self,
        id: &AbstractIdentifier,
    ) -> BTreeSet<AbstractIdentifier> {
        // An untracked ID simply references nothing (empty set).
        self.objects
            .get(id)
            .map(|(object, _offset)| object.get_referenced_ids_overapproximation().clone())
            .unwrap_or_default()
    }

    /// Return all IDs that get referenced by the memory object pointed to by the given ID.
    /// The returned set is an underapproximation of the actual referenced IDs,
    /// since only still tracked pointers inside the memory object are used to compute it.
    pub fn get_referenced_ids_underapproximation(
        &self,
        id: &AbstractIdentifier,
    ) -> BTreeSet<AbstractIdentifier> {
        // In contrast to the overapproximation, asking for an untracked ID
        // is considered a caller bug here and aborts with a panic.
        let (object, _offset) = self
            .objects
            .get(id)
            .expect("Abstract ID not associated to an object");
        object.get_referenced_ids_underapproximation()
    }
}
src/cwe_checker_lib/src/analysis/pointer_inference/object_list/list_manipulation.rs
0 → 100644
View file @
05843314
//! Methods of [`AbstractObjectList`] that add or remove objects from the object list
//! or provide information about the set of objects in the object list.
use
super
::
*
;
impl AbstractObjectList {
    /// Remove the memory object that `object_id` points to from the object list.
    pub fn remove_object(&mut self, object_id: &AbstractIdentifier) {
        self.objects.remove(object_id);
    }

    /// Add a new abstract object to the object list
    ///
    /// If an object with the same ID already exists,
    /// the object is marked as non-unique and merged with the newly created object.
    pub fn add_abstract_object(
        &mut self,
        object_id: AbstractIdentifier,
        initial_offset: ValueDomain,
        type_: ObjectType,
        address_bytesize: ByteSize,
    ) {
        let new_object = AbstractObject::new(type_, address_bytesize);
        if let Some((object, offset)) = self.objects.get_mut(&object_id) {
            // If the identifier already exists, we have to assume that more than one object may be referenced by this identifier.
            object.mark_as_not_unique();
            *object = object.merge(&new_object);
            *offset = offset.merge(&initial_offset);
        } else {
            self.objects.insert(object_id, (new_object, initial_offset));
        }
    }

    /// For abstract IDs not contained in the provided set of IDs
    /// remove the corresponding abstract objects.
    ///
    /// This function does not remove any pointer targets in the contained abstract objects.
    pub fn remove_unused_objects(&mut self, ids_to_keep: &BTreeSet<AbstractIdentifier>) {
        // Collect the doomed keys first to avoid mutating the map while iterating it.
        // (Cheaper than the previous full-key-set + set-difference approach.)
        let ids_to_remove: Vec<AbstractIdentifier> = self
            .objects
            .keys()
            .filter(|id| !ids_to_keep.contains(*id))
            .cloned()
            .collect();
        for id in ids_to_remove {
            self.objects.remove(&id);
        }
    }

    /// Get all object IDs.
    pub fn get_all_object_ids(&self) -> BTreeSet<AbstractIdentifier> {
        self.objects.keys().cloned().collect()
    }

    /// Get the number of objects that are currently tracked.
    #[cfg(test)]
    pub fn get_num_objects(&self) -> usize {
        self.objects.len()
    }

    /// Append those objects from another object list, whose abstract IDs are not known to self.
    pub fn append_unknown_objects(&mut self, other_object_list: &AbstractObjectList) {
        for (id, (other_object, other_offset)) in other_object_list.objects.iter() {
            // `contains_key` instead of the unidiomatic `get(id) == None` comparison.
            if !self.objects.contains_key(id) {
                self.objects
                    .insert(id.clone(), (other_object.clone(), other_offset.clone()));
            }
        }
    }

    /// Remove the provided IDs as targets from all pointers in all objects.
    /// Also remove the objects, that these IDs point to.
    pub fn remove_ids(&mut self, ids_to_remove: &BTreeSet<AbstractIdentifier>) {
        // `BTreeMap::remove` is a no-op for absent keys,
        // so the former `get(id).is_some()` pre-check was redundant.
        for id in ids_to_remove {
            self.objects.remove(id);
        }
        // Also drop the IDs as pointer targets inside all remaining objects.
        for (object, _) in self.objects.values_mut() {
            object.remove_ids(ids_to_remove);
        }
    }
}
src/cwe_checker_lib/src/analysis/pointer_inference/object_list/mod.rs
0 → 100644
View file @
05843314
use
super
::
object
::
*
;
use
super
::{
Data
,
ValueDomain
};
use
crate
::
prelude
::
*
;
use
crate
::{
abstract_domain
::
*
,
utils
::
binary
::
RuntimeMemoryImage
};
use
serde
::{
Deserialize
,
Serialize
};
use
std
::
collections
::{
BTreeMap
,
BTreeSet
};
mod
cwe_helpers
;
mod
id_manipulation
;
mod
list_manipulation
;
/// The list of all known abstract objects.
///
/// Each abstract object is unique in the sense that there is exactly one abstract identifier pointing to it.
/// However, an abstract object itself can be marked as non-unique
/// to indicate that it may represent more than one actual memory object.
#[derive(Serialize, Deserialize, Debug, PartialEq, Eq, Clone)]
pub struct AbstractObjectList {
    /// The abstract objects.
    ///
    /// Each abstract object comes with an offset given as a [`ValueDomain`].
    /// This offset determines where the zero offset corresponding to the abstract identifier inside the object is.
    /// Note that this offset may be a `Top` element
    /// if the exact offset corresponding to the identifier is unknown.
    objects: BTreeMap<AbstractIdentifier, (AbstractObject, ValueDomain)>,
}
impl AbstractObjectList {
    /// Create a new abstract object list with just one abstract object corresponding to the stack.
    ///
    /// The offset into the stack object and the `upper_index_bound` of the stack object will be both set to zero.
    /// This corresponds to the generic stack state at the start of a function.
    pub fn from_stack_id(
        stack_id: AbstractIdentifier,
        address_bytesize: ByteSize,
    ) -> AbstractObjectList {
        let mut objects = BTreeMap::new();
        let mut stack_object = AbstractObject::new(ObjectType::Stack, address_bytesize);
        // At function entry nothing above the stack pointer belongs to this frame.
        stack_object.set_upper_index_bound(Bitvector::zero(address_bytesize.into()).into());
        objects.insert(
            stack_id,
            (
                stack_object,
                // Base offset zero: the stack ID points exactly at the frame base.
                Bitvector::zero(apint::BitWidth::from(address_bytesize)).into(),
            ),
        );
        AbstractObjectList { objects }
    }

    /// Get the value at a given address.
    /// If the address is not unique, merge the value of all possible addresses.
    ///
    /// This function only checks for relative targets and not for absolute addresses.
    /// If the address does not contain any relative targets an empty value is returned.
    pub fn get_value(&self, address: &Data, size: ByteSize) -> Data {
        let mut merged_value = Data::new_empty(size);
        for (id, offset_pointer) in address.get_relative_values() {
            if let Some((object, offset_identifier)) = self.objects.get(id) {
                // Translate the pointer offset into the object's internal coordinates.
                let offset = offset_pointer.clone() + offset_identifier.clone();
                if let Ok(concrete_offset) = offset.try_to_bitvec() {
                    let value = object.get_value(concrete_offset, size);
                    merged_value = merged_value.merge(&value);
                } else {
                    // Non-constant offset: the read result is unknown.
                    merged_value.set_contains_top_flag();
                }
            } else {
                // Untracked target object: the read result is unknown.
                merged_value.set_contains_top_flag();
            }
        }
        // Propagate imprecision of the address itself into the result.
        if address.contains_top() {
            merged_value.set_contains_top_flag();
        }
        merged_value
    }

    /// Set the value at a given address.
    ///
    /// If the address has more than one target,
    /// we merge-write the value to all targets.
    pub fn set_value(&mut self, pointer: Data, value: Data) -> Result<(), Error> {
        let targets = pointer.get_relative_values();
        match targets.len() {
            // No relative targets: nothing to write, but not an error.
            0 => Ok(()),
            1 => {
                // Unique target: a strong update overwriting the old value.
                let (id, pointer_offset) = targets.iter().next().unwrap();
                // NOTE(review): unwrap panics if the target ID is untracked —
                // presumably all pointer targets are tracked here; confirm.
                let (object, id_offset) = self.objects.get_mut(id).unwrap();
                let adjusted_offset = pointer_offset.clone() + id_offset.clone();
                object.set_value(value, &adjusted_offset)
            }
            _ => {
                // There is more than one object that the pointer may write to.
                // We merge-write to all possible targets
                for (id, offset) in targets {
                    let (object, object_offset) = self.objects.get_mut(id).unwrap();
                    let adjusted_offset = offset.clone() + object_offset.clone();
                    object.merge_value(value.clone(), &adjusted_offset);
                }
                Ok(())
            }
        }
    }

    /// Assume that arbitrary writes happened to a memory object,
    /// including adding pointers to targets contained in `new_possible_reference_targets` to it.
    ///
    /// This is used as a coarse approximation for function calls whose effect is unknown.
    /// Note that this may still underestimate the effect of a function call:
    /// We do not assume that the state of the object changes (i.e. no memory freed), which may not be true.
    /// We assume that pointers to the object are *not* given to other threads or the operating system,
    /// which could result in arbitrary writes to the object even after the function call returned.
    pub fn assume_arbitrary_writes_to_object(
        &mut self,
        object_id: &AbstractIdentifier,
        new_possible_reference_targets: &BTreeSet<AbstractIdentifier>,
    ) {
        // Unknown IDs are silently ignored.
        if let Some((object, _)) = self.objects.get_mut(object_id) {
            object.assume_arbitrary_writes(new_possible_reference_targets);
        }
    }

    /// Return the object type of a memory object.
    /// Returns an error if no object with the given ID is contained in the object list.
    pub fn get_object_type(
        &self,
        object_id: &AbstractIdentifier,
    ) -> Result<Option<ObjectType>, ()> {
        match self.objects.get(object_id) {
            Some((object, _)) => Ok(object.get_object_type()),
            None => Err(()),
        }
    }

    /// Returns `true` if the object corresponding to the given ID represents an unique object
    /// and `false` if it may represent more than one object (e.g. several array elements).
    /// Returns an error if the ID is not contained in the object list.
    pub fn is_unique_object(&self, object_id: &AbstractIdentifier) -> Result<bool, Error> {
        match self.objects.get(object_id) {
            Some((object, _)) => Ok(object.is_unique()),
            None => Err(anyhow!("Object ID not contained in object list.")),
        }
    }
}
impl AbstractDomain for AbstractObjectList {
    /// Merge two abstract object lists.
    ///
    /// Right now this function is only sound if for each abstract object only one ID pointing to it exists.
    /// Violations of this will be detected and result in panics.
    /// Further investigation into the problem is needed
    /// to decide, how to correctly represent and handle cases,
    /// where more than one ID should point to the same object.
    fn merge(&self, other: &Self) -> Self {
        // Start from a copy of `self` and fold every entry of `other` into it.
        let mut merged_objects = self.objects.clone();
        for (id, (other_object, other_offset)) in other.objects.iter() {
            match merged_objects.get_mut(id) {
                // Shared ID: merge object and base offset component-wise.
                Some((object, offset)) => {
                    *object = object.merge(other_object);
                    *offset = offset.merge(other_offset);
                }
                // ID only known to `other`: carry the entry over unchanged.
                None => {
                    merged_objects
                        .insert(id.clone(), (other_object.clone(), other_offset.clone()));
                }
            }
        }
        AbstractObjectList {
            objects: merged_objects,
        }
    }

    /// Always returns `false`, since abstract object lists have no *Top* element.
    fn is_top(&self) -> bool {
        false
    }
}
impl AbstractObjectList {
    /// Get a more compact json-representation of the abstract object list.
    /// Intended for pretty printing, not useable for serialization/deserialization.
    pub fn to_json_compact(&self) -> serde_json::Value {
        use serde_json::*;
        // Build the JSON object in one pass: one key per abstract object,
        // labeled with its ID and base offset.
        let object_map: Map<String, Value> = self
            .objects
            .iter()
            .map(|(id, (object, offset))| {
                (
                    format!("{} (base offset {})", id, offset),
                    object.to_json_compact(),
                )
            })
            .collect();
        Value::Object(object_map)
    }
}
#[cfg(test)]
mod
tests
;
src/cwe_checker_lib/src/analysis/pointer_inference/object_list/tests.rs
0 → 100644
View file @
05843314
use
super
::
*
;
/// Wrap a concrete 64-bit integer into a `ValueDomain` test value.
fn bv(value: i64) -> ValueDomain {
    Bitvector::from_i64(value).into()
}
/// Build an abstract identifier for an 8-byte register with the given name,
/// anchored at the dummy TID "time0".
fn new_id(name: &str) -> AbstractIdentifier {
    let register_location = AbstractLocation::Register(name.into(), ByteSize::new(8));
    AbstractIdentifier::new(Tid::new("time0"), register_location)
}
#[test]
fn
abstract_object_list
()
{
let
mut
obj_list
=
AbstractObjectList
::
from_stack_id
(
new_id
(
"RSP"
.into
()),
ByteSize
::
new
(
8
));
assert_eq!
(
obj_list
.objects
.len
(),
1
);
assert_eq!
(
obj_list
.objects
.values
()
.next
()
.unwrap
()
.
1
,
bv
(
0
));
let
pointer
=
DataDomain
::
from_target
(
new_id
(
"RSP"
.into
()),
bv
(
8
));
obj_list
.set_value
(
pointer
.clone
(),
bv
(
42
)
.into
())
.unwrap
();
assert_eq!
(
obj_list
.get_value
(
&
pointer
,
ByteSize
::
new
(
8
)),
bv
(
42
)
.into
()
);
let
mut
other_obj_list
=
AbstractObjectList
::
from_stack_id
(
new_id
(
"RSP"
.into
()),
ByteSize
::
new
(
8
));
let
second_pointer
=
DataDomain
::
from_target
(
new_id
(
"RSP"
.into
()),
bv
(
-
8
));
other_obj_list
.set_value
(
pointer
.clone
(),
bv
(
42
)
.into
())
.unwrap
();
other_obj_list
.set_value
(
second_pointer
.clone
(),
bv
(
35
)
.into
())
.unwrap
();
assert_eq!
(
other_obj_list
.get_value
(
&
second_pointer
,
ByteSize
::
new
(
8
)),
bv
(
35
)
.into
()
);
other_obj_list
.add_abstract_object
(
new_id
(
"RAX"
.into
()),
bv
(
0
),
ObjectType
::
Heap
,
ByteSize
::
new
(
8
),
);
let
heap_pointer
=
DataDomain
::
from_target
(
new_id
(
"RAX"
.into
()),
bv
(
8
));
other_obj_list
.set_value
(
heap_pointer
.clone
(),
bv
(
3
)
.into
())
.unwrap
();
let
mut
merged
=
obj_list
.merge
(
&
other_obj_list
);
assert_eq!
(
merged
.get_value
(
&
pointer
,
ByteSize
::
new
(
8
)),
bv
(
42
)
.into
());
assert
!
(
merged
.get_value
(
&
second_pointer
,
ByteSize
::
new
(
8
))
.contains_top
());
assert_eq!
(
merged
.get_value
(
&
heap_pointer
,
ByteSize
::
new
(
8
)),
bv
(
3
)
.into
()
);
assert_eq!
(
merged
.objects
.len
(),
2
);
merged
.set_value
(
pointer
.merge
(
&
heap_pointer
),
bv
(
3
)
.into
())
.unwrap
();
assert_eq!
(
merged
.get_value
(
&
pointer
,
ByteSize
::
new
(
8
)),
IntervalDomain
::
mock
(
3
,
42
)
.with_stride
(
39
)
.into
()
);
assert_eq!
(
merged
.get_value
(
&
heap_pointer
,
ByteSize
::
new
(
8
)),
bv
(
3
)
.into
()
);
assert_eq!
(
merged
.objects
.len
(),
2
);
other_obj_list
.set_value
(
pointer
.clone
(),
heap_pointer
.clone
())
.unwrap
();
assert_eq!
(
other_obj_list
.get_referenced_ids_overapproximation
(
&
new_id
(
"RSP"
.into
()))
.len
(),
1
);
assert_eq!
(
*
other_obj_list
.get_referenced_ids_overapproximation
(
&
new_id
(
"RSP"
.into
()))
.iter
()
.next
()
.unwrap
(),
new_id
(
"RAX"
.into
())
);
let
modified_heap_pointer
=
DataDomain
::
from_target
(
new_id
(
"ID2"
.into
()),
bv
(
8
));
other_obj_list
.replace_abstract_id
(
&
new_id
(
"RAX"
.into
()),
&
new_id
(
"ID2"
.into
()),
&
bv
(
0
));
assert_eq!
(
other_obj_list
.get_value
(
&
pointer
,
ByteSize
::
new
(
8
)),
modified_heap_pointer
.clone
()
);
assert_eq!
(
other_obj_list
.objects
.get
(
&
new_id
(
"RAX"
.into
())),
None
);
assert
!
(
matches!
(
other_obj_list
.objects
.get
(
&
new_id
(
"ID2"
.into
())),
Some
(
_
)
));
let
mut
ids_to_keep
=
BTreeSet
::
new
();
ids_to_keep
.insert
(
new_id
(
"ID2"
.into
()));
other_obj_list
.remove_unused_objects
(
&
ids_to_keep
);
assert_eq!
(
other_obj_list
.objects
.len
(),
1
);
assert_eq!
(
other_obj_list
.objects
.iter
()
.next
()
.unwrap
()
.
0
,
&
new_id
(
"ID2"
.into
())
);
assert_eq!
(
other_obj_list
.objects
.values
()
.next
()
.unwrap
()
.
0
.get_state
(),
crate
::
analysis
::
pointer_inference
::
object
::
ObjectState
::
Alive
);
other_obj_list
.mark_mem_object_as_freed
(
&
modified_heap_pointer
)
.unwrap
();
assert_eq!
(
other_obj_list
.objects
.values
()
.next
()
.unwrap
()
.
0
.get_state
(),
crate
::
analysis
::
pointer_inference
::
object
::
ObjectState
::
Dangling
);
}
#[test]
fn append_unknown_objects_test() {
    // Both lists start with the same stack object;
    // only the source list additionally tracks a heap object.
    let mut target_list = AbstractObjectList::from_stack_id(new_id("stack"), ByteSize::new(8));
    let mut source_list = AbstractObjectList::from_stack_id(new_id("stack"), ByteSize::new(8));
    source_list.add_abstract_object(
        new_id("heap_obj"),
        bv(0).into(),
        ObjectType::Heap,
        ByteSize::new(8),
    );
    // Appending copies only the IDs unknown to the target list, i.e. the heap object.
    target_list.append_unknown_objects(&source_list);
    assert_eq!(target_list.objects.len(), 2);
    assert!(target_list.objects.get(&new_id("stack")).is_some());
    assert!(target_list.objects.get(&new_id("heap_obj")).is_some());
}
src/cwe_checker_lib/src/analysis/pointer_inference/state/tests.rs
View file @
05843314
...
...
@@ -397,7 +397,9 @@ fn reachable_ids_under_and_overapproximation() {
);
assert_eq!
(
state
.add_directly_reachable_ids_to_id_set
(
reachable_ids
.clone
()),
vec!
[
stack_id
.clone
()]
.into_iter
()
.collect
()
vec!
[
stack_id
.clone
(),
heap_id
.clone
()]
.into_iter
()
.collect
()
);
assert_eq!
(
state
.add_recursively_referenced_ids_to_id_set
(
reachable_ids
.clone
()),
...
...
Write
Preview
Markdown
is supported
0%
Try again
or
attach a new file
Attach a file
Cancel
You are about to add
0
people
to the discussion. Proceed with caution.
Finish editing this message first!
Cancel
Please
register
or
sign in
to comment