Commit 67440d78 by Enkelmann

Enable an installation with Ghidra as backend.

parent 3c76cb36
GHIDRA_PATH =
.PHONY: all clean test uninstall docker
all:
cargo build --release
mkdir -p ${HOME}/.config/cwe_checker
cp src/utils/registers.json ${HOME}/.config/cwe_checker/registers.json
cp src/config.json ${HOME}/.config/cwe_checker/config.json
ifdef GHIDRA_PATH
cargo install --path caller --locked
echo "{ \"ghidra_path\": \"${GHIDRA_PATH}\" }" > ${HOME}/.config/cwe_checker/ghidra.json
mkdir -p ${HOME}/.local/share/cwe_checker
cp -r ghidra ${HOME}/.local/share/cwe_checker/ghidra
else
cp target/release/libcwe_checker_rs.a src/libcwe_checker_rs.a
cp target/release/libcwe_checker_rs.so src/dllcwe_checker_rs.so
dune build
@@ -10,9 +21,7 @@ all:
cd plugins/cwe_checker_type_inference && make all
cd plugins/cwe_checker_type_inference_print && make all
cd plugins/cwe_checker_pointer_inference_debug && make all
mkdir -p ${HOME}/.config/cwe_checker
cp src/utils/registers.json ${HOME}/.config/cwe_checker/registers.json
cp src/config.json ${HOME}/.config/cwe_checker/config.json
endif
test:
cargo test
@@ -40,13 +49,15 @@ clean:
cd plugins/cwe_checker_pointer_inference_debug; make clean; cd ../..
uninstall:
rm -f -r ${HOME}/.config/cwe_checker
rm -f -r ${HOME}/.local/share/cwe_checker
cargo uninstall cwe_checker; echo ""
dune uninstall
cd plugins/cwe_checker; make uninstall; cd ../..
cd plugins/cwe_checker_emulation; make uninstall; cd ../..
cd plugins/cwe_checker_type_inference; make uninstall; cd ../..
cd plugins/cwe_checker_type_inference_print; make uninstall; cd ../..
cd plugins/cwe_checker_pointer_inference_debug; make uninstall; cd ../..
rm -f -r ${HOME}/.config/cwe_checker
documentation:
dune build @doc
@@ -22,12 +22,17 @@ The following arguments should convince you to give *cwe_checker* a try:
- *cwe_checker* can be integrated as a plugin into [FACT](https://github.com/fkie-cad/FACT_core)
## Installation ##
### Using the docker image ###
The simplest way is to pull the latest Docker image from [Docker Hub](https://hub.docker.com/r/fkiecad/cwe_checker):
- `docker pull fkiecad/cwe_checker:latest` yields an image based on the current master branch.
- `docker pull fkiecad/cwe_checker:stable` yields an image based on the latest stable release version.
If you want to build the Docker image yourself, just run `docker build -t cwe_checker .`.
### Local installation with BAP as backend ###
Another way is to install *cwe_checker* through the OCaml package manager opam. The package [cwe_checker](https://opam.ocaml.org/packages/cwe_checker/) (`opam install cwe_checker`) provides the latest stable release version of the *cwe_checker*.
If you plan to develop *cwe_checker*, it is recommended to build it using the provided `Makefile`. In this case you must ensure that all dependencies are fulfilled:
@@ -46,6 +51,15 @@ If you plan to develop *cwe_checker*, it is recommended to build it using the pr
Just run `make all` to compile and register the plugin with BAP. You can run the test suite via `make test`. Documentation can be built via `make documentation`.
### Local installation with Ghidra as backend ###
The Ghidra backend is still in early development, so many checks are not yet available for it. To try it out, the following dependencies must be fulfilled:
- [Rust](https://www.rust-lang.org) >= 1.44.1
- Ghidra >= 9.1
- The Java library `gson`. Download it from https://search.maven.org/artifact/com.google.code.gson/gson/2.8.6/jar and move it to the Ghidra plugin folder located at `$HOME/.ghidra/.ghidra_9.X.X_PUBLIC/plugins` (with the version number depending on your version of Ghidra).
Run `make all GHIDRA_PATH=path/to/ghidra_folder` (with the correct path to the local Ghidra installation inserted) to compile and install the *cwe_checker*.
## Usage ##
The *cwe_checker* takes a binary file as input, runs several [checks](#checks) based on static analysis on it, and then outputs a list of CWE warnings found during the analysis.
@@ -6,3 +6,6 @@ edition = "2018"
[dependencies]
structopt = "0.3"
cwe_checker_rs = { path = "../cwe_checker_rs" }
serde_json = "1.0"
directories = "3.0"
\ No newline at end of file
use cwe_checker_rs::intermediate_representation::Project;
use cwe_checker_rs::utils::log::print_all_messages;
use cwe_checker_rs::utils::{get_ghidra_plugin_path, read_config_file};
use std::collections::HashSet;
use std::path::Path;
use std::process::Command;
use structopt::StructOpt;
// TODO: Add validation function for `--partial=???` parameter.
// TODO: `--partial` option needs better documentation on how to specify the list of checks to run.
// TODO: Add module version printing function
#[derive(Debug, StructOpt)]
/// Find vulnerable patterns in binary executables
struct CmdlineArgs {
@@ -20,7 +21,7 @@ struct CmdlineArgs {
#[structopt(long, short)]
out: Option<String>,
/// Specify a specific set of checks to be run.
/// Specify a specific set of checks to be run as a comma-separated list, e.g. 'CWE332,CWE476,CWE782'.
#[structopt(long, short)]
partial: Option<String>,
@@ -39,17 +40,24 @@ struct CmdlineArgs {
/// Prints out the version numbers of all known modules.
#[structopt(long)]
module_versions: bool,
/// Use BAP as backend (instead of Ghidra). Requires BAP and the cwe_checker-BAP-plugin to be installed.
#[structopt(long, hidden = true)]
bap: bool,
}
fn main() {
let cmdline_args = CmdlineArgs::from_args();
if cmdline_args.module_versions {
println!("printing module versions");
todo!(); // TODO: implement!
} else if let Some(exit_code) = build_bap_command(&cmdline_args).status().unwrap().code() {
if cmdline_args.bap {
// Use BAP as backend
if let Some(exit_code) = build_bap_command(&cmdline_args).status().unwrap().code() {
std::process::exit(exit_code);
}
} else {
// Use Ghidra as backend
run_with_ghidra(cmdline_args);
}
}
/// Build the BAP command corresponding to the given command line arguments.
@@ -92,3 +100,145 @@ fn check_file_existence(file_path: String) -> Result<(), String> {
Err(format!("{} is not a file.", file_path))
}
}
/// Run the cwe_checker with Ghidra as its backend.
fn run_with_ghidra(args: CmdlineArgs) {
let mut modules = cwe_checker_rs::get_modules();
if args.module_versions {
// Only print the module versions and then quit.
println!("[cwe_checker] module_versions:");
for module in modules.iter() {
println!("{}", module);
}
return;
}
if args.check_path {
panic!("Check-path module not yet implemented for the Ghidra backend");
}
// Get the configuration file
let config: serde_json::Value = if let Some(config_path) = args.config {
let file = std::io::BufReader::new(std::fs::File::open(config_path).unwrap());
serde_json::from_reader(file).expect("Parsing of the configuration file failed")
} else {
read_config_file("config.json")
};
// Filter the modules to be executed if the `--partial` parameter is set.
if let Some(ref partial_module_list) = args.partial {
filter_modules_for_partial_run(&mut modules, partial_module_list);
}
let project = get_project_from_ghidra(&Path::new(&args.binary.unwrap()));
// Execute the modules and collect their logs and CWE-warnings.
let mut all_logs = Vec::new();
let mut all_cwes = Vec::new();
for module in modules {
let (mut logs, mut cwes) = (module.run)(&project, &config[&module.name]);
all_logs.append(&mut logs);
all_cwes.append(&mut cwes);
}
// Print the results of the modules.
if args.quiet {
all_logs = Vec::new(); // Suppress all log messages since the `--quiet` flag is set.
}
print_all_messages(all_logs, all_cwes, args.out.as_deref(), args.json);
}
/// Only keep the modules specified by the `--partial` parameter in the `modules` list.
/// The parameter is a comma-separated list of module names, e.g. 'CWE332,CWE476,CWE782'.
fn filter_modules_for_partial_run(
modules: &mut Vec<&cwe_checker_rs::CweModule>,
partial_param: &str,
) {
let module_names: HashSet<&str> = partial_param.split(',').collect();
*modules = module_names
.into_iter()
.filter_map(|module_name| {
if let Some(module) = modules.iter().find(|module| module.name == module_name) {
Some(*module)
} else if module_name == "" {
None
} else {
panic!("Error: {} is not a valid module name.", module_name)
}
})
.collect();
}
/// Execute the `p_code_extractor` plugin in Ghidra and parse its output into the `Project` data structure.
fn get_project_from_ghidra(file_path: &Path) -> Project {
let ghidra_path: std::path::PathBuf =
serde_json::from_value(read_config_file("ghidra.json")["ghidra_path"].clone())
.expect("Path to Ghidra not configured.");
let headless_path = ghidra_path.join("support/analyzeHeadless");
// Find the correct paths for temporary files.
let project_dirs = directories::ProjectDirs::from("", "", "cwe_checker")
.expect("Could not determine path for temporary files");
let tmp_folder = project_dirs
.runtime_dir()
.expect("Could not determine path for temporary files");
if !tmp_folder.exists() {
std::fs::create_dir(tmp_folder).expect("Unable to create temporary folder");
}
// We add a timestamp suffix to file names
// so that if two instances of the cwe_checker are running in parallel on the same file
// they do not interfere with each other.
let timestamp_suffix = format!(
"{:?}",
std::time::SystemTime::now()
.duration_since(std::time::SystemTime::UNIX_EPOCH)
.unwrap()
.as_millis()
);
let output_filename = format!(
"{}_{}.json",
file_path
.file_name()
.expect("Invalid file name")
.to_string_lossy(),
timestamp_suffix
);
let output_path = tmp_folder.join(output_filename);
let ghidra_plugin_path = get_ghidra_plugin_path("p_code_extractor");
// Execute Ghidra
let output = Command::new(&headless_path)
.arg(&tmp_folder) // The folder where temporary files should be stored
.arg(format!("PcodeExtractor_{}", timestamp_suffix)) // The name of the temporary Ghidra Project.
.arg("-import") // Import a file into the Ghidra project
.arg(file_path) // File import path
.arg("-postScript") // Execute a script after standard analysis by Ghidra finished
.arg(ghidra_plugin_path.join("PcodeExtractor.java")) // Path to the PcodeExtractor.java
.arg(&output_path) // Output file path
.arg("-scriptPath") // Add a folder containing additional script files to the Ghidra script file search paths
.arg(ghidra_plugin_path) // Path to the folder containing PcodeExtractor.java (so that the other Java files can be found)
.arg("-deleteProject") // Delete the temporary project after the script finished
.arg("-analysisTimeoutPerFile") // Set a timeout for how long the standard analysis can run before getting aborted
.arg("3600") // Timeout of one hour (=3600 seconds) // TODO: The post-script can detect that the timeout fired and react accordingly.
.output() // Execute the command and catch its output.
.unwrap();
if !output.status.success() {
match output.status.code() {
Some(code) => {
println!("{}", String::from_utf8(output.stdout).unwrap());
println!("{}", String::from_utf8(output.stderr).unwrap());
panic!("Execution of Ghidra plugin failed with exit code {}", code)
}
None => panic!("Execution of Ghidra plugin failed: Process was terminated."),
}
}
// Read the results from the Ghidra script
let file =
std::fs::File::open(&output_path).expect("Could not read results of the Ghidra script");
let mut project_pcode: cwe_checker_rs::pcode::Project =
serde_json::from_reader(std::io::BufReader::new(file)).unwrap();
project_pcode.normalize();
let project: Project = project_pcode.into();
// delete the temporary file again.
std::fs::remove_file(output_path).unwrap();
project
}
@@ -19,4 +19,4 @@ directories = "3.0"
[lib]
name = "cwe_checker_rs"
crate-type = ["staticlib", "cdylib"]
crate-type = ["staticlib", "cdylib", "lib"]
@@ -69,7 +69,7 @@ pub trait Context {
///
/// # Usage
///
/// ```
/// ```ignore
/// let mut computation = Computation::new(context, optional_default_node_value);
///
/// // set starting node values with computation.set_node_value(..)
@@ -73,10 +73,10 @@ impl<'a> Context<'a> {
/// If `result` is an `Err`, log the error message as a debug message through the `log_collector` channel.
pub fn log_debug<'_lt>(&self, result: Result<(), Error>, location: Option<&'_lt Tid>) {
if let Err(err) = result {
let log_message = LogMessage {
text: format!("Pointer Inference: {}", err),
level: LogLevel::Debug,
location: location.cloned(),
let mut log_message =
LogMessage::new_debug(format!("{}", err)).source("Pointer Inference");
if let Some(loc) = location {
log_message = log_message.location(loc.clone());
};
self.log_collector.send(log_message).unwrap();
}
@@ -35,6 +35,12 @@ use state::State;
/// The version number of the analysis.
const VERSION: &str = "0.1";
pub static CWE_MODULE: crate::CweModule = crate::CweModule {
name: "Memory",
version: VERSION,
run: run_analysis,
};
/// The abstract domain type for representing register values.
type Data = DataDomain<BitvectorDomain>;
@@ -103,14 +109,10 @@ impl<'a> PointerInference<'a> {
let mut fixpoint_computation =
super::interprocedural_fixpoint::Computation::new(context, None);
log_sender
.send(LogMessage {
text: format!(
.send(LogMessage::new_debug(format!(
"Pointer Inference: Adding {} entry points",
entry_sub_to_entry_node_map.len()
),
level: LogLevel::Debug,
location: None,
})
)))
.unwrap();
for (sub_tid, start_node_index) in entry_sub_to_entry_node_map.into_iter() {
fixpoint_computation.set_node_value(
@@ -255,18 +257,27 @@ impl<'a> PointerInference<'a> {
}
fn log_debug(&self, msg: impl Into<String>) {
let log_msg = LogMessage {
text: msg.into(),
level: LogLevel::Debug,
location: None,
};
let log_msg = LogMessage::new_debug(msg.into());
self.log_collector.send(log_msg).unwrap();
}
}
/// The main entry point for executing the pointer inference analysis.
pub fn run_analysis(
project: &Project,
analysis_params: &serde_json::Value,
) -> (Vec<LogMessage>, Vec<CweWarning>) {
let config: Config = serde_json::from_value(analysis_params.clone()).unwrap();
run(project, config, false)
}
/// Generate and execute the pointer inference analysis.
/// Returns a vector of all log messages generated during the analysis and a vector of all found CWE warnings.
pub fn run(project: &Project, config: Config, print_debug: bool) -> (Vec<CweWarning>, Vec<String>) {
pub fn run(
project: &Project,
config: Config,
print_debug: bool,
) -> (Vec<LogMessage>, Vec<CweWarning>) {
let (cwe_sender, cwe_receiver) = crossbeam_channel::unbounded();
let (log_sender, log_receiver) = crossbeam_channel::unbounded();
@@ -297,8 +308,8 @@ pub fn run(project: &Project, config: Config, print_debug: bool) -> (Vec<CweWarn
}
// Return the logs and CWE warnings
(
warning_collector_thread.join().unwrap(),
log_collector_thread.join().unwrap(),
warning_collector_thread.join().unwrap(),
)
}
@@ -320,7 +331,7 @@ fn collect_cwe_warnings(receiver: crossbeam_channel::Receiver<CweWarning>) -> Ve
}
/// Collect log messages from the receiver until the channel is closed. Then return them.
fn collect_logs(receiver: crossbeam_channel::Receiver<LogMessage>) -> Vec<String> {
fn collect_logs(receiver: crossbeam_channel::Receiver<LogMessage>) -> Vec<LogMessage> {
let mut logs_with_address = HashMap::new();
let mut general_logs = Vec::new();
while let Ok(log_message) = receiver.recv() {
@@ -334,6 +345,5 @@
.values()
.cloned()
.chain(general_logs.into_iter())
.map(|msg| msg.to_string())
.collect()
}
@@ -13,9 +13,13 @@ fn run_pointer_inference(program_jsonbuilder_val: ocaml::Value) -> (Vec<CweWarni
project.replace_let_bindings();
let config: crate::analysis::pointer_inference::Config =
serde_json::from_value(crate::utils::read_config_file()["pointer_inference"].clone())
serde_json::from_value(crate::utils::read_config_file("config.json")["Memory"].clone())
.unwrap();
crate::analysis::pointer_inference::run(&project.into(), config, false)
let (logs, cwes) = crate::analysis::pointer_inference::run(&project.into(), config, false);
(
cwes,
logs.into_iter().map(|log| format! {"{}", log}).collect(),
)
}
caml!(rs_run_pointer_inference(program_jsonbuilder_val) {
@@ -35,7 +39,7 @@ fn run_pointer_inference_and_print_debug(program_jsonbuilder_val: ocaml::Value)
project.replace_let_bindings();
let config: crate::analysis::pointer_inference::Config =
serde_json::from_value(crate::utils::read_config_file()["pointer_inference"].clone())
serde_json::from_value(crate::utils::read_config_file("config.json")["Memory"].clone())
.unwrap();
crate::analysis::pointer_inference::run(&project.into(), config, true); // Note: This discards all CweWarnings and log messages.
}
@@ -7,6 +7,9 @@ Parts of the cwe_checker that are written in Rust.
#[macro_use]
extern crate ocaml;
use crate::intermediate_representation::Project;
use crate::utils::log::{CweWarning, LogMessage};
pub mod abstract_domain;
pub mod analysis;
pub mod bil;
@@ -25,3 +28,26 @@ mod prelude {
pub use crate::intermediate_representation::{Term, Tid};
pub use anyhow::{anyhow, Error};
}
/// The generic function signature for the main function of a CWE module
pub type CweModuleFn = fn(&Project, &serde_json::Value) -> (Vec<LogMessage>, Vec<CweWarning>);
/// A structure containing general information about a CWE analysis module,
/// including the function to be called to run the analysis.
pub struct CweModule {
pub name: &'static str,
pub version: &'static str,
pub run: CweModuleFn,
}
impl std::fmt::Display for CweModule {
/// Print the module name and its version number.
fn fmt(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(formatter, r#""{}": "{}""#, self.name, self.version)
}
}
/// Get a list of all known analysis modules.
pub fn get_modules() -> Vec<&'static CweModule> {
vec![&crate::analysis::pointer_inference::CWE_MODULE]
}
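To illustrate the plugin architecture introduced here, the following is a minimal sketch of a further analysis module conforming to `CweModuleFn`. The module name `ExampleCheck` and the function `run_example_check` are hypothetical and not part of this commit; a real check would register itself the same way as the pointer inference `CWE_MODULE` above and be added to `get_modules()`.

```rust
// Hypothetical example module; only the pointer inference module exists in this commit.
fn run_example_check(
    _project: &Project,
    _config: &serde_json::Value,
) -> (Vec<LogMessage>, Vec<CweWarning>) {
    // A real module would inspect the project here and emit CWE warnings.
    let logs = vec![LogMessage::new_info("example check executed")];
    (logs, Vec::new())
}

pub static EXAMPLE_MODULE: CweModule = CweModule {
    name: "ExampleCheck",
    version: "0.1",
    run: run_example_check,
};
```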
@@ -29,7 +29,6 @@ pub struct Jmp {
pub condition: Option<Variable>,
}
// TODO: CALLOTHER is still missing!
#[derive(Serialize, Deserialize, Debug, PartialEq, Eq, Hash, Clone, Copy)]
pub enum JmpType {
BRANCH,
......
use crate::prelude::*;
/// A CWE warning message.
#[derive(Serialize, Deserialize, Debug, PartialEq, Eq, Hash, Clone, PartialOrd, Ord, Default)]
pub struct CweWarning {
pub name: String,
@@ -11,34 +12,128 @@ pub struct CweWarning {
pub description: String,
}
impl std::fmt::Display for CweWarning {
fn fmt(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(
formatter,
"[{}] ({}) {}",
self.name, self.version, self.description
)
}
}
/// A generic log message.
#[derive(Serialize, Deserialize, Debug, PartialEq, Eq, Hash, Clone, PartialOrd, Ord)]
pub struct LogMessage {
/// The log message.
pub text: String,
/// The severity/type of the log message.
pub level: LogLevel,
/// The location inside the binary that the message is related to.
pub location: Option<Tid>,
/// The analysis where the message originated.
pub source: Option<String>,
}
impl LogMessage {
/// Create a new `Info`-level log message
pub fn new_info(text: impl Into<String>) -> LogMessage {
LogMessage {
text: text.into(),
level: LogLevel::Info,
location: None,
source: None,
}
}
/// Create a new `Debug`-level log message
pub fn new_debug(text: impl Into<String>) -> LogMessage {
LogMessage {
text: text.into(),
level: LogLevel::Debug,
location: None,
source: None,
}
}
/// Create a new `Error`-level log message
pub fn new_error(text: impl Into<String>) -> LogMessage {
LogMessage {
text: text.into(),
level: LogLevel::Error,
location: None,
source: None,
}
}
/// Associate a specific location to the log message.
pub fn location(mut self, location: Tid) -> LogMessage {
self.location = Some(location);
self
}
/// Set the name of the source analysis for the log message.
pub fn source(mut self, source: impl Into<String>) -> LogMessage {
self.source = Some(source.into());
self
}
}
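As a usage sketch of the builder methods above, the helper function below is hypothetical but mirrors the call site in the pointer inference `Context::log_debug` shown earlier; `Error` and `Tid` come from the crate prelude.

```rust
// Hypothetical helper: wrap an error into a debug-level message attributed to
// the pointer inference analysis and the affected term.
fn example_debug_log(err: &Error, location: &Tid) -> LogMessage {
    LogMessage::new_debug(format!("{}", err))
        .source("Pointer Inference")
        .location(location.clone())
}
```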
/// The severity/type of a log message.
#[derive(Serialize, Deserialize, Debug, PartialEq, Eq, Hash, Clone, PartialOrd, Ord)]
pub enum LogLevel {
/// Messages intended for debugging.
Debug,
/// Errors encountered during analysis.
Error,
/// Non-error messages intended for the user.
Info,
}
impl std::fmt::Display for LogMessage {
fn fmt(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
if let Some(ref tid) = self.location {
match self.level {
LogLevel::Debug => write!(formatter, "Debug: {}: {}", tid.address, self.text),
LogLevel::Error => write!(formatter, "Error: {}: {}", tid.address, self.text),
LogLevel::Info => write!(formatter, "Info: {}: {}", tid.address, self.text),
}
} else {
match self.level {
LogLevel::Debug => write!(formatter, "Debug: {}", self.text),
LogLevel::Error => write!(formatter, "Error: {}", self.text),
LogLevel::Info => write!(formatter, "Info: {}", self.text),
LogLevel::Debug => write!(formatter, "DEBUG: ")?,
LogLevel::Error => write!(formatter, "ERROR: ")?,
LogLevel::Info => write!(formatter, "INFO: ")?,
};
match (&self.source, &self.location) {
(Some(source), Some(location)) => write!(formatter, "{} @ {}: ", source, location)?,
(Some(source), None) => write!(formatter, "{}: ", source)?,
(None, Some(location)) => write!(formatter, "{}: ", location)?,
(None, None) => (),
};
write!(formatter, "{}", self.text)
}
}
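A small illustrative test (not part of the commit) pins down the resulting format; the uppercase `ERROR:`/`DEBUG:`/`INFO:` prefixes are what the adjusted OCaml log parsing below matches on.

```rust
#[cfg(test)]
mod tests {
    use super::*;

    // Sketch: an error-level message with a source but no location.
    #[test]
    fn display_format_with_source_only() {
        let msg = LogMessage::new_error("something went wrong").source("Pointer Inference");
        assert_eq!(
            format!("{}", msg),
            "ERROR: Pointer Inference: something went wrong"
        );
    }
}
```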
/// Print all provided log- and CWE-messages.
///
/// Log-messages will always be printed to `stdout`.
/// CWE-warnings will either be printed to `stdout` or to the file path provided in `out_path`.
///
/// If `emit_json` is set, the CWE-warnings will be converted to json for the output.
pub fn print_all_messages(
logs: Vec<LogMessage>,
cwes: Vec<CweWarning>,
out_path: Option<&str>,
emit_json: bool,
) {
for log in logs {
println!("{}", log);
}
let output: String = if emit_json {
serde_json::to_string_pretty(&cwes).unwrap()
} else {
cwes.iter()
.map(|cwe| format!("{}", cwe))
.collect::<Vec<String>>()
.join("\n")
+ "\n"
};
if let Some(file_path) = out_path {
std::fs::write(file_path, output).unwrap();
} else {
print!("{}", output);
}
}
@@ -31,13 +31,21 @@ pub fn get_generic_parameter_and_callee_saved_register(
(params, callee_saved)
}
/// Get the contents of the main configuration file.
pub fn read_config_file() -> serde_json::Value {
/// Get the contents of a configuration file.
pub fn read_config_file(filename: &str) -> serde_json::Value {
let project_dirs = directories::ProjectDirs::from("", "", "cwe_checker")
.expect("Could not discern location of configuration files.");
let config_dir = project_dirs.config_dir();
let config_path = config_dir.join("config.json");
let config_path = config_dir.join(filename);
let config_file =
std::fs::read_to_string(config_path).expect("Could not read register configuration file");
std::fs::read_to_string(config_path).expect("Could not read configuration file");
serde_json::from_str(&config_file).unwrap()
}
/// Get the folder path to a Ghidra plugin bundled with the cwe_checker.
pub fn get_ghidra_plugin_path(plugin_name: &str) -> std::path::PathBuf {
let project_dirs = directories::ProjectDirs::from("", "", "cwe_checker")
.expect("Could not discern location of data directory.");
let data_dir = project_dirs.data_dir();
data_dir.join("ghidra").join(plugin_name)
}
@@ -21,9 +21,9 @@ let run (project: Project.t) (tid_map: Bap.Std.word Bap.Std.Tid.Map.t) : unit =
match message with
| `String message_string ->
begin match String.lsplit2 message_string ~on:':' with
| Some("Error", msg) -> Log_utils.error @@ String.strip msg
| Some("Debug", msg) -> Log_utils.debug @@ String.strip msg
| Some("Info", msg) -> Log_utils.info @@ String.strip msg
| Some("ERROR", msg) -> Log_utils.error @@ String.strip msg
| Some("DEBUG", msg) -> Log_utils.debug @@ String.strip msg
| Some("INFO", msg) -> Log_utils.info @@ String.strip msg
| _ -> failwith "Malformed log-message."
end
| _ -> failwith "Log-message is not a string."
@@ -191,7 +191,7 @@
"scanf"
]
},
"pointer_inference": {
"Memory": {
"allocation_symbols": [
"malloc",
"calloc",
......