add regression testing tasks (#664)

bmc-msft authored on 2021-03-18 15:37:19 -04:00, committed by GitHub
parent 34b2a739cb, commit 6e60a8cf10
50 changed files with 2141 additions and 203 deletions

View File

@@ -2,7 +2,11 @@
// Licensed under the MIT License.
#![allow(clippy::large_enum_variant)]
use crate::tasks::{analysis, coverage, fuzz, heartbeat::*, merge, report};
use crate::tasks::{
analysis, coverage, fuzz,
heartbeat::{init_task_heartbeat, TaskHeartbeatClient},
merge, regression, report,
};
use anyhow::Result;
use onefuzz::machine_id::{get_machine_id, get_scaleset_name};
use onefuzz_telemetry::{
@@ -70,6 +74,9 @@ pub enum Config {
#[serde(alias = "libfuzzer_coverage")]
LibFuzzerCoverage(coverage::libfuzzer_coverage::Config),
#[serde(alias = "libfuzzer_regression")]
LibFuzzerRegression(regression::libfuzzer::Config),
#[serde(alias = "generic_analysis")]
GenericAnalysis(analysis::generic::Config),
@@ -84,6 +91,9 @@ pub enum Config {
#[serde(alias = "generic_crash_report")]
GenericReport(report::generic::Config),
#[serde(alias = "generic_regression")]
GenericRegression(regression::generic::Config),
}
impl Config {
@@ -104,11 +114,13 @@ impl Config {
Config::LibFuzzerMerge(c) => &mut c.common,
Config::LibFuzzerReport(c) => &mut c.common,
Config::LibFuzzerCoverage(c) => &mut c.common,
Config::LibFuzzerRegression(c) => &mut c.common,
Config::GenericAnalysis(c) => &mut c.common,
Config::GenericMerge(c) => &mut c.common,
Config::GenericReport(c) => &mut c.common,
Config::GenericSupervisor(c) => &mut c.common,
Config::GenericGenerator(c) => &mut c.common,
Config::GenericRegression(c) => &mut c.common,
}
}
@@ -118,11 +130,13 @@ impl Config {
Config::LibFuzzerMerge(c) => &c.common,
Config::LibFuzzerReport(c) => &c.common,
Config::LibFuzzerCoverage(c) => &c.common,
Config::LibFuzzerRegression(c) => &c.common,
Config::GenericAnalysis(c) => &c.common,
Config::GenericMerge(c) => &c.common,
Config::GenericReport(c) => &c.common,
Config::GenericSupervisor(c) => &c.common,
Config::GenericGenerator(c) => &c.common,
Config::GenericRegression(c) => &c.common,
}
}
@@ -132,11 +146,13 @@ impl Config {
Config::LibFuzzerMerge(_) => "libfuzzer_merge",
Config::LibFuzzerReport(_) => "libfuzzer_crash_report",
Config::LibFuzzerCoverage(_) => "libfuzzer_coverage",
Config::LibFuzzerRegression(_) => "libfuzzer_regression",
Config::GenericAnalysis(_) => "generic_analysis",
Config::GenericMerge(_) => "generic_merge",
Config::GenericReport(_) => "generic_crash_report",
Config::GenericSupervisor(_) => "generic_supervisor",
Config::GenericGenerator(_) => "generic_generator",
Config::GenericRegression(_) => "generic_regression",
};
match self {
@@ -193,6 +209,16 @@ impl Config {
Config::GenericReport(config) => {
report::generic::ReportTask::new(config).managed_run().await
}
Config::GenericRegression(config) => {
regression::generic::GenericRegressionTask::new(config)
.run()
.await
}
Config::LibFuzzerRegression(config) => {
regression::libfuzzer::LibFuzzerRegressionTask::new(config)
.run()
.await
}
}
}
}
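
The two new config variants plug into the agent's task dispatch through `#[serde(alias = ...)]`: a scheduled task whose type string is `libfuzzer_regression` or `generic_regression` deserializes into the corresponding config and is run by the matching regression task. Below is a minimal sketch of that alias-based selection with toy stand-in types; the enum-level serde attributes of the real `Config` are not visible in this hunk, so the sketch simply uses serde's default externally tagged representation.

use serde::Deserialize;

#[derive(Debug, Deserialize)]
struct RegressionConfig {
    target_exe: String, // stand-in for the real config fields
}

#[derive(Debug, Deserialize)]
enum TaskConfig {
    #[serde(alias = "libfuzzer_regression")]
    LibFuzzerRegression(RegressionConfig),
    #[serde(alias = "generic_regression")]
    GenericRegression(RegressionConfig),
}

fn main() -> serde_json::Result<()> {
    // With the default representation, the variant key may be either the Rust
    // name ("LibFuzzerRegression") or the declared alias.
    let task: TaskConfig =
        serde_json::from_str(r#"{"libfuzzer_regression": {"target_exe": "fuzz.exe"}}"#)?;
    println!("{:?}", task);
    Ok(())
}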

View File

@@ -3,7 +3,7 @@
use crate::tasks::{
config::CommonConfig,
heartbeat::*,
heartbeat::{HeartbeatSender, TaskHeartbeatClient},
utils::{self, default_bool_true},
};
use anyhow::{Context, Result};

View File

@@ -4,7 +4,7 @@
#![allow(clippy::too_many_arguments)]
use crate::tasks::{
config::{CommonConfig, ContainerType},
heartbeat::*,
heartbeat::{HeartbeatSender, TaskHeartbeatClient},
report::crash_report::monitor_reports,
stats::common::{monitor_stats, StatsFormat},
utils::CheckNotify,

View File

@@ -3,7 +3,7 @@
use crate::tasks::{
config::CommonConfig,
heartbeat::*,
heartbeat::HeartbeatSender,
utils::{self, default_bool_true},
};
use anyhow::Result;

View File

@@ -8,6 +8,7 @@ pub mod fuzz;
pub mod generic;
pub mod heartbeat;
pub mod merge;
pub mod regression;
pub mod report;
pub mod stats;
pub mod utils;

View File

@@ -0,0 +1,162 @@
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT License.
use crate::tasks::{
heartbeat::{HeartbeatSender, TaskHeartbeatClient},
report::crash_report::{parse_report_file, CrashTestResult, RegressionReport},
};
use anyhow::{Context, Result};
use async_trait::async_trait;
use onefuzz::syncdir::SyncedDir;
use reqwest::Url;
use std::path::PathBuf;
/// Defines implementation-provided callbacks for all implementers of regression tasks.
///
/// Shared regression task behavior is implemented in this module.
#[async_trait]
pub trait RegressionHandler {
/// Test the provided input and generate a crash result
/// * `input` - path to the input to test
/// * `input_url` - input url
async fn get_crash_result(&self, input: PathBuf, input_url: Url) -> Result<CrashTestResult>;
}
/// Runs the regression task
pub async fn run(
heartbeat_client: Option<TaskHeartbeatClient>,
regression_reports: &SyncedDir,
crashes: &SyncedDir,
report_dirs: &[&SyncedDir],
report_list: &Option<Vec<String>>,
readonly_inputs: &Option<SyncedDir>,
handler: &impl RegressionHandler,
) -> Result<()> {
info!("Starting generic regression task");
handle_crash_reports(
handler,
crashes,
report_dirs,
report_list,
&regression_reports,
&heartbeat_client,
)
.await?;
if let Some(readonly_inputs) = &readonly_inputs {
handle_inputs(
handler,
readonly_inputs,
&regression_reports,
&heartbeat_client,
)
.await?;
}
Ok(())
}
/// Run the regression on the files in the 'inputs' location
/// * `handler` - regression handler
/// * `readonly_inputs` - location of the input files
/// * `regression_reports` - where reports should be saved
/// * `heartbeat_client` - heartbeat client
pub async fn handle_inputs(
handler: &impl RegressionHandler,
readonly_inputs: &SyncedDir,
regression_reports: &SyncedDir,
heartbeat_client: &Option<TaskHeartbeatClient>,
) -> Result<()> {
readonly_inputs.init_pull().await?;
let mut input_files = tokio::fs::read_dir(&readonly_inputs.path).await?;
while let Some(file) = input_files.next_entry().await? {
heartbeat_client.alive();
let file_path = file.path();
if !file_path.is_file() {
continue;
}
let file_name = file_path
.file_name()
.ok_or_else(|| format_err!("missing filename"))?
.to_string_lossy()
.to_string();
let input_url = readonly_inputs.url.url().join(&file_name)?;
let crash_test_result = handler.get_crash_result(file_path, input_url).await?;
RegressionReport {
crash_test_result,
original_crash_test_result: None,
}
.save(None, regression_reports)
.await?
}
Ok(())
}
pub async fn handle_crash_reports(
handler: &impl RegressionHandler,
crashes: &SyncedDir,
report_dirs: &[&SyncedDir],
report_list: &Option<Vec<String>>,
regression_reports: &SyncedDir,
heartbeat_client: &Option<TaskHeartbeatClient>,
) -> Result<()> {
// without crash report containers, skip this method
if report_dirs.is_empty() {
return Ok(());
}
crashes.init_pull().await?;
for possible_dir in report_dirs {
possible_dir.init_pull().await?;
let mut report_files = tokio::fs::read_dir(&possible_dir.path).await?;
while let Some(file) = report_files.next_entry().await? {
heartbeat_client.alive();
let file_path = file.path();
if !file_path.is_file() {
continue;
}
let file_name = file_path
.file_name()
.ok_or_else(|| format_err!("missing filename"))?
.to_string_lossy()
.to_string();
if let Some(report_list) = &report_list {
if !report_list.contains(&file_name) {
continue;
}
}
let original_crash_test_result = parse_report_file(file.path())
.await
.with_context(|| format!("unable to parse crash report: {}", file_name))?;
let input_blob = match &original_crash_test_result {
CrashTestResult::CrashReport(x) => x.input_blob.clone(),
CrashTestResult::NoRepro(x) => x.input_blob.clone(),
}
.ok_or_else(|| format_err!("crash report is missing input blob: {}", file_name))?;
let input_url = crashes.url.blob(&input_blob.name).url();
let input = crashes.path.join(&input_blob.name);
let crash_test_result = handler.get_crash_result(input, input_url).await?;
RegressionReport {
crash_test_result,
original_crash_test_result: Some(original_crash_test_result),
}
.save(Some(file_name), regression_reports)
.await?
}
}
Ok(())
}
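
regression::common holds the shared driver: it pulls the existing crash reports (and, optionally, a set of plain inputs), asks the concrete task to re-test each one via the `RegressionHandler` callback, and saves a `RegressionReport` pairing the fresh result with the original report, if there was one. A self-contained sketch of that callback shape follows, with simplified stand-in types (a `String` result instead of `CrashTestResult`, a plain input list instead of a `SyncedDir`); only the structure mirrors the real code.

use anyhow::Result;
use async_trait::async_trait;
use std::path::PathBuf;

#[async_trait]
trait RegressionHandler {
    // Re-test one input and describe the outcome (stand-in for CrashTestResult).
    async fn get_crash_result(&self, input: PathBuf) -> Result<String>;
}

struct EchoHandler;

#[async_trait]
impl RegressionHandler for EchoHandler {
    async fn get_crash_result(&self, input: PathBuf) -> Result<String> {
        Ok(format!("re-tested {}", input.display()))
    }
}

// Shared driver, analogous to common::run / handle_inputs: iterate the inputs,
// delegate the testing to the handler, record each result.
async fn run(handler: &impl RegressionHandler, inputs: &[PathBuf]) -> Result<()> {
    for input in inputs {
        let result = handler.get_crash_result(input.clone()).await?;
        println!("{}", result);
    }
    Ok(())
}

#[tokio::main]
async fn main() -> Result<()> {
    run(&EchoHandler, &[PathBuf::from("poc-1"), PathBuf::from("poc-2")]).await
}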

View File

@@ -0,0 +1,109 @@
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT License.
use crate::tasks::{
config::CommonConfig,
report::{crash_report::CrashTestResult, generic},
utils::default_bool_true,
};
use anyhow::Result;
use async_trait::async_trait;
use onefuzz::syncdir::SyncedDir;
use reqwest::Url;
use serde::Deserialize;
use std::{collections::HashMap, path::PathBuf};
use super::common::{self, RegressionHandler};
#[derive(Debug, Deserialize)]
pub struct Config {
pub target_exe: PathBuf,
#[serde(default)]
pub target_options: Vec<String>,
#[serde(default)]
pub target_env: HashMap<String, String>,
pub target_timeout: Option<u64>,
pub crashes: SyncedDir,
pub regression_reports: SyncedDir,
pub report_list: Option<Vec<String>>,
pub reports: Option<SyncedDir>,
pub unique_reports: Option<SyncedDir>,
pub no_repro: Option<SyncedDir>,
pub readonly_inputs: Option<SyncedDir>,
#[serde(default)]
pub check_asan_log: bool,
#[serde(default = "default_bool_true")]
pub check_debugger: bool,
#[serde(default)]
pub check_retry_count: u64,
#[serde(default)]
pub minimized_stack_depth: Option<usize>,
#[serde(flatten)]
pub common: CommonConfig,
}
pub struct GenericRegressionTask {
config: Config,
}
#[async_trait]
impl RegressionHandler for GenericRegressionTask {
async fn get_crash_result(&self, input: PathBuf, input_url: Url) -> Result<CrashTestResult> {
let args = generic::TestInputArgs {
input_url: Some(input_url),
input: &input,
target_exe: &self.config.target_exe,
target_options: &self.config.target_options,
target_env: &self.config.target_env,
setup_dir: &self.config.common.setup_dir,
task_id: self.config.common.task_id,
job_id: self.config.common.job_id,
target_timeout: self.config.target_timeout,
check_retry_count: self.config.check_retry_count,
check_asan_log: self.config.check_asan_log,
check_debugger: self.config.check_debugger,
minimized_stack_depth: self.config.minimized_stack_depth,
};
generic::test_input(args).await
}
}
impl GenericRegressionTask {
pub fn new(config: Config) -> Self {
Self { config }
}
pub async fn run(&self) -> Result<()> {
info!("Starting generic regression task");
let heartbeat_client = self.config.common.init_heartbeat().await?;
let mut report_dirs = vec![];
for dir in &[
&self.config.reports,
&self.config.unique_reports,
&self.config.no_repro,
] {
if let Some(dir) = dir {
report_dirs.push(dir);
}
}
common::run(
heartbeat_client,
&self.config.regression_reports,
&self.config.crashes,
&report_dirs,
&self.config.report_list,
&self.config.readonly_inputs,
self,
)
.await?;
Ok(())
}
}
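
The loop above gathers whichever of the three optional report containers (`reports`, `unique_reports`, `no_repro`) are configured. An equivalent, more compact formulation (a sketch only, not part of this change) filters the `Option`s directly; `Dir` stands in for `SyncedDir`.

struct Dir(&'static str);

struct Cfg {
    reports: Option<Dir>,
    unique_reports: Option<Dir>,
    no_repro: Option<Dir>,
}

fn report_dirs(cfg: &Cfg) -> Vec<&Dir> {
    // Keep only the containers that are actually configured.
    [&cfg.reports, &cfg.unique_reports, &cfg.no_repro]
        .iter()
        .filter_map(|d| d.as_ref())
        .collect()
}

fn main() {
    let cfg = Cfg {
        reports: Some(Dir("reports")),
        unique_reports: None,
        no_repro: Some(Dir("no-repro")),
    };
    for dir in report_dirs(&cfg) {
        println!("{}", dir.0);
    }
}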

View File

@@ -0,0 +1,107 @@
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT License.
use crate::tasks::{
config::CommonConfig,
report::{crash_report::CrashTestResult, libfuzzer_report},
utils::default_bool_true,
};
use anyhow::Result;
use reqwest::Url;
use super::common::{self, RegressionHandler};
use async_trait::async_trait;
use onefuzz::syncdir::SyncedDir;
use serde::Deserialize;
use std::{collections::HashMap, path::PathBuf};
#[derive(Debug, Deserialize)]
pub struct Config {
pub target_exe: PathBuf,
#[serde(default)]
pub target_options: Vec<String>,
#[serde(default)]
pub target_env: HashMap<String, String>,
pub target_timeout: Option<u64>,
pub crashes: SyncedDir,
pub regression_reports: SyncedDir,
pub report_list: Option<Vec<String>>,
pub unique_reports: Option<SyncedDir>,
pub reports: Option<SyncedDir>,
pub no_repro: Option<SyncedDir>,
pub readonly_inputs: Option<SyncedDir>,
#[serde(default = "default_bool_true")]
pub check_fuzzer_help: bool,
#[serde(default)]
pub check_retry_count: u64,
#[serde(default)]
pub minimized_stack_depth: Option<usize>,
#[serde(flatten)]
pub common: CommonConfig,
}
pub struct LibFuzzerRegressionTask {
config: Config,
}
#[async_trait]
impl RegressionHandler for LibFuzzerRegressionTask {
async fn get_crash_result(&self, input: PathBuf, input_url: Url) -> Result<CrashTestResult> {
let args = libfuzzer_report::TestInputArgs {
input_url: Some(input_url),
input: &input,
target_exe: &self.config.target_exe,
target_options: &self.config.target_options,
target_env: &self.config.target_env,
setup_dir: &self.config.common.setup_dir,
task_id: self.config.common.task_id,
job_id: self.config.common.job_id,
target_timeout: self.config.target_timeout,
check_retry_count: self.config.check_retry_count,
minimized_stack_depth: self.config.minimized_stack_depth,
};
libfuzzer_report::test_input(args).await
}
}
impl LibFuzzerRegressionTask {
pub fn new(config: Config) -> Self {
Self { config }
}
pub async fn run(&self) -> Result<()> {
info!("Starting libfuzzer regression task");
let mut report_dirs = vec![];
for dir in &[
&self.config.reports,
&self.config.unique_reports,
&self.config.no_repro,
] {
if let Some(dir) = dir {
report_dirs.push(dir);
}
}
let heartbeat_client = self.config.common.init_heartbeat().await?;
common::run(
heartbeat_client,
&self.config.regression_reports,
&self.config.crashes,
&report_dirs,
&self.config.report_list,
&self.config.readonly_inputs,
self,
)
.await?;
Ok(())
}
}

View File

@@ -0,0 +1,6 @@
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT License.
pub mod common;
pub mod generic;
pub mod libfuzzer;

View File

@@ -5,7 +5,10 @@ use anyhow::{Context, Result};
use futures::StreamExt;
use onefuzz::{blob::BlobUrl, monitor::DirectoryMonitor, syncdir::SyncedDir};
use onefuzz_telemetry::{
Event::{new_report, new_unable_to_reproduce, new_unique_report},
Event::{
new_report, new_unable_to_reproduce, new_unique_report, regression_report,
regression_unable_to_reproduce,
},
EventData,
};
use serde::{Deserialize, Serialize};
@@ -46,6 +49,7 @@ pub struct CrashReport {
pub job_id: Uuid,
pub scariness_score: Option<u32>,
pub scariness_description: Option<String>,
}
@@ -62,11 +66,42 @@ pub struct NoCrash {
}
#[derive(Debug, Deserialize, Serialize)]
#[serde(rename_all = "snake_case")]
pub enum CrashTestResult {
CrashReport(CrashReport),
NoRepro(NoCrash),
}
#[derive(Debug, Deserialize, Serialize)]
pub struct RegressionReport {
pub crash_test_result: CrashTestResult,
pub original_crash_test_result: Option<CrashTestResult>,
}
impl RegressionReport {
pub async fn save(
self,
report_name: Option<String>,
regression_reports: &SyncedDir,
) -> Result<()> {
let (event, name) = match &self.crash_test_result {
CrashTestResult::CrashReport(report) => {
let name = report_name.unwrap_or_else(|| report.unique_blob_name());
(regression_report, name)
}
CrashTestResult::NoRepro(report) => {
let name = report_name.unwrap_or_else(|| report.blob_name());
(regression_unable_to_reproduce, name)
}
};
if upload_or_save_local(&self, &name, regression_reports).await? {
event!(event; EventData::Path = name);
}
Ok(())
}
}
async fn upload_or_save_local<T: Serialize>(
report: &T,
dest_name: &str,
@@ -76,6 +111,10 @@ async fn upload_or_save_local<T: Serialize>(
}
impl CrashTestResult {
/// Saves the crash result as a crash report
/// * `unique_reports` - location to save the deduplicated report if the bug was reproduced
/// * `reports` - location to save the report if the bug was reproduced
/// * `no_repro` - location to save the report if the bug was not reproduced
pub async fn save(
&self,
unique_reports: &Option<SyncedDir>,
@@ -113,7 +152,7 @@ impl CrashTestResult {
}
}
#[derive(Debug, Deserialize, Serialize)]
#[derive(Debug, Deserialize, Serialize, Clone)]
pub struct InputBlob {
pub account: Option<String>,
pub container: Option<String>,
@@ -188,7 +227,7 @@ impl NoCrash {
}
}
async fn parse_report_file(path: PathBuf) -> Result<CrashTestResult> {
pub async fn parse_report_file(path: PathBuf) -> Result<CrashTestResult> {
let raw = std::fs::read_to_string(&path)
.with_context(|| format_err!("unable to open crash report: {}", path.display()))?;
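
`RegressionReport` pairs the fresh `crash_test_result` with the `original_crash_test_result` it was re-tested against (absent for plain inputs that had no prior report), and saving one emits a `regression_report` or `regression_unable_to_reproduce` event. Since `CrashTestResult` carries no serde tag attribute in this hunk and renames its variants to snake_case, the saved report should nest each result under a `crash_report` or `no_repro` key. A sketch of that serialized shape, with the inner report bodies reduced to a single stand-in field:

use serde::Serialize;

#[derive(Serialize)]
struct CrashReport {
    input_sha256: String, // the real report carries many more fields
}

#[derive(Serialize)]
struct NoCrash {
    input_sha256: String,
}

#[derive(Serialize)]
#[serde(rename_all = "snake_case")]
enum CrashTestResult {
    CrashReport(CrashReport),
    NoRepro(NoCrash),
}

#[derive(Serialize)]
struct RegressionReport {
    crash_test_result: CrashTestResult,
    original_crash_test_result: Option<CrashTestResult>,
}

fn main() -> serde_json::Result<()> {
    let report = RegressionReport {
        crash_test_result: CrashTestResult::NoRepro(NoCrash {
            input_sha256: "<sha256>".into(),
        }),
        original_crash_test_result: Some(CrashTestResult::CrashReport(CrashReport {
            input_sha256: "<sha256>".into(),
        })),
    };
    // => {"crash_test_result":{"no_repro":{...}},"original_crash_test_result":{"crash_report":{...}}}
    println!("{}", serde_json::to_string(&report)?);
    Ok(())
}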

View File

@@ -5,7 +5,7 @@ use super::crash_report::{CrashReport, CrashTestResult, InputBlob, NoCrash};
use crate::tasks::{
config::CommonConfig,
generic::input_poller::{CallbackImpl, InputPoller, Processor},
heartbeat::*,
heartbeat::{HeartbeatSender, TaskHeartbeatClient},
utils::default_bool_true,
};
use anyhow::Result;
@@ -18,6 +18,7 @@ use std::{
path::{Path, PathBuf},
};
use storage_queue::{Message, QueueClient};
use uuid::Uuid;
#[derive(Debug, Deserialize)]
pub struct Config {
@@ -86,27 +87,103 @@ impl ReportTask {
}
}
pub struct TestInputArgs<'a> {
pub input_url: Option<Url>,
pub input: &'a Path,
pub target_exe: &'a Path,
pub target_options: &'a [String],
pub target_env: &'a HashMap<String, String>,
pub setup_dir: &'a Path,
pub task_id: Uuid,
pub job_id: Uuid,
pub target_timeout: Option<u64>,
pub check_retry_count: u64,
pub check_asan_log: bool,
pub check_debugger: bool,
pub minimized_stack_depth: Option<usize>,
}
pub async fn test_input(args: TestInputArgs<'_>) -> Result<CrashTestResult> {
let tester = Tester::new(
args.setup_dir,
args.target_exe,
args.target_options,
args.target_env,
)
.check_asan_log(args.check_asan_log)
.check_debugger(args.check_debugger)
.check_retry_count(args.check_retry_count)
.set_optional(args.target_timeout, |tester, timeout| {
tester.timeout(timeout)
});
let input_sha256 = sha256::digest_file(args.input).await?;
let task_id = args.task_id;
let job_id = args.job_id;
let input_blob = args
.input_url
.and_then(|u| BlobUrl::new(u).ok())
.map(InputBlob::from);
let test_report = tester.test_input(args.input).await?;
if let Some(crash_log) = test_report.asan_log {
let crash_report = CrashReport::new(
crash_log,
task_id,
job_id,
args.target_exe,
input_blob,
input_sha256,
args.minimized_stack_depth,
);
Ok(CrashTestResult::CrashReport(crash_report))
} else if let Some(crash) = test_report.crash {
let call_stack_sha256 = sha256::digest_iter(&crash.call_stack);
let crash_report = CrashReport {
input_blob,
input_sha256,
executable: PathBuf::from(args.target_exe),
call_stack: crash.call_stack,
crash_type: crash.crash_type,
crash_site: crash.crash_site,
call_stack_sha256,
asan_log: None,
scariness_score: None,
scariness_description: None,
task_id,
job_id,
minimized_stack: vec![],
minimized_stack_sha256: None,
minimized_stack_function_names: vec![],
minimized_stack_function_names_sha256: None,
};
Ok(CrashTestResult::CrashReport(crash_report))
} else {
let no_repro = NoCrash {
input_blob,
input_sha256,
executable: PathBuf::from(args.target_exe),
task_id,
job_id,
tries: 1 + args.check_retry_count,
error: test_report.error.map(|e| format!("{}", e)),
};
Ok(CrashTestResult::NoRepro(no_repro))
}
}
pub struct GenericReportProcessor<'a> {
config: &'a Config,
tester: Tester<'a>,
heartbeat_client: Option<TaskHeartbeatClient>,
}
impl<'a> GenericReportProcessor<'a> {
pub fn new(config: &'a Config, heartbeat_client: Option<TaskHeartbeatClient>) -> Self {
let tester = Tester::new(
&config.common.setup_dir,
&config.target_exe,
&config.target_options,
&config.target_env,
)
.check_asan_log(config.check_asan_log)
.check_debugger(config.check_debugger)
.check_retry_count(config.check_retry_count);
Self {
config,
tester,
heartbeat_client,
}
}
@@ -117,56 +194,25 @@ impl<'a> GenericReportProcessor<'a> {
input: &Path,
) -> Result<CrashTestResult> {
self.heartbeat_client.alive();
let input_sha256 = sha256::digest_file(input).await?;
let task_id = self.config.common.task_id;
let job_id = self.config.common.job_id;
let input_blob = match input_url {
Some(x) => Some(InputBlob::from(BlobUrl::new(x)?)),
None => None,
let args = TestInputArgs {
input_url,
input,
target_exe: &self.config.target_exe,
target_options: &self.config.target_options,
target_env: &self.config.target_env,
setup_dir: &self.config.common.setup_dir,
task_id: self.config.common.task_id,
job_id: self.config.common.job_id,
target_timeout: self.config.target_timeout,
check_retry_count: self.config.check_retry_count,
check_asan_log: self.config.check_asan_log,
check_debugger: self.config.check_debugger,
minimized_stack_depth: self.config.minimized_stack_depth,
};
let result = test_input(args).await?;
let test_report = self.tester.test_input(input).await?;
if let Some(asan_log) = test_report.asan_log {
let crash_report = CrashReport::new(
asan_log,
task_id,
job_id,
&self.config.target_exe,
input_blob,
input_sha256,
self.config.minimized_stack_depth,
);
Ok(CrashTestResult::CrashReport(crash_report))
} else if let Some(crash) = test_report.crash {
let call_stack_sha256 = sha256::digest_iter(&crash.call_stack);
let crash_report = CrashReport {
input_blob,
input_sha256,
executable: PathBuf::from(&self.config.target_exe),
call_stack: crash.call_stack,
crash_type: crash.crash_type,
crash_site: crash.crash_site,
call_stack_sha256,
task_id,
job_id,
..Default::default()
};
Ok(CrashTestResult::CrashReport(crash_report))
} else {
let no_repro = NoCrash {
input_blob,
input_sha256,
executable: PathBuf::from(&self.config.target_exe),
task_id,
job_id,
tries: 1 + self.config.check_retry_count,
error: test_report.error.map(|e| format!("{}", e)),
};
Ok(CrashTestResult::NoRepro(no_repro))
}
Ok(result)
}
}
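
The per-input testing logic that previously lived inside `GenericReportProcessor::test_input` is now the free function `test_input(TestInputArgs)`, so the crash-report processor and the new regression task share one code path. The classification order is: a sanitizer log yields a `CrashReport`; otherwise a debugger-detected crash yields a `CrashReport` built from the call stack; otherwise the result is `NoRepro` with `tries = 1 + check_retry_count`. A self-contained sketch of just that decision ladder, using stand-in types:

// Stand-in for the Tester output that test_input inspects.
struct TestReport {
    asan_log: Option<String>,
    crash: Option<String>, // debugger-detected crash, simplified to a String
}

#[derive(Debug, PartialEq)]
enum Outcome {
    CrashFromSanitizerLog,
    CrashFromDebugger,
    NoRepro { tries: u64 },
}

fn classify(report: TestReport, check_retry_count: u64) -> Outcome {
    if report.asan_log.is_some() {
        Outcome::CrashFromSanitizerLog
    } else if report.crash.is_some() {
        Outcome::CrashFromDebugger
    } else {
        Outcome::NoRepro {
            tries: 1 + check_retry_count,
        }
    }
}

fn main() {
    let report = TestReport {
        asan_log: None,
        crash: None,
    };
    assert_eq!(classify(report, 2), Outcome::NoRepro { tries: 3 });
}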

View File

@@ -3,7 +3,10 @@
use super::crash_report::*;
use crate::tasks::{
config::CommonConfig, generic::input_poller::*, heartbeat::*, utils::default_bool_true,
config::CommonConfig,
generic::input_poller::*,
heartbeat::{HeartbeatSender, TaskHeartbeatClient},
utils::default_bool_true,
};
use anyhow::{Context, Result};
use async_trait::async_trait;
@@ -75,7 +78,7 @@ impl ReportTask {
let mut processor = AsanProcessor::new(self.config.clone()).await?;
if let Some(crashes) = &self.config.crashes {
self.poller.batch_process(&mut processor, crashes).await?;
self.poller.batch_process(&mut processor, &crashes).await?;
}
if self.config.check_queue {
@@ -88,6 +91,72 @@ impl ReportTask {
}
}
pub struct TestInputArgs<'a> {
pub input_url: Option<Url>,
pub input: &'a Path,
pub target_exe: &'a Path,
pub target_options: &'a [String],
pub target_env: &'a HashMap<String, String>,
pub setup_dir: &'a Path,
pub task_id: uuid::Uuid,
pub job_id: uuid::Uuid,
pub target_timeout: Option<u64>,
pub check_retry_count: u64,
pub minimized_stack_depth: Option<usize>,
}
pub async fn test_input(args: TestInputArgs<'_>) -> Result<CrashTestResult> {
let fuzzer = LibFuzzer::new(
args.target_exe,
args.target_options,
args.target_env,
args.setup_dir,
);
let task_id = args.task_id;
let job_id = args.job_id;
let input_blob = args
.input_url
.and_then(|u| BlobUrl::new(u).ok())
.map(InputBlob::from);
let input = args.input;
let input_sha256 = sha256::digest_file(args.input)
.await
.with_context(|| format_err!("unable to sha256 digest input file: {}", input.display()))?;
let test_report = fuzzer
.repro(args.input, args.target_timeout, args.check_retry_count)
.await?;
match test_report.asan_log {
Some(crash_log) => {
let crash_report = CrashReport::new(
crash_log,
task_id,
job_id,
args.target_exe,
input_blob,
input_sha256,
args.minimized_stack_depth,
);
Ok(CrashTestResult::CrashReport(crash_report))
}
None => {
let no_repro = NoCrash {
input_blob,
input_sha256,
executable: PathBuf::from(&args.target_exe),
task_id,
job_id,
tries: 1 + args.check_retry_count,
error: test_report.error.map(|e| format!("{}", e)),
};
Ok(CrashTestResult::NoRepro(no_repro))
}
}
}
pub struct AsanProcessor {
config: Arc<Config>,
heartbeat_client: Option<TaskHeartbeatClient>,
@@ -109,58 +178,22 @@ impl AsanProcessor {
input: &Path,
) -> Result<CrashTestResult> {
self.heartbeat_client.alive();
let fuzzer = LibFuzzer::new(
&self.config.target_exe,
&self.config.target_options,
&self.config.target_env,
&self.config.common.setup_dir,
);
let task_id = self.config.common.task_id;
let job_id = self.config.common.job_id;
let input_blob = match input_url {
Some(x) => Some(InputBlob::from(BlobUrl::new(x)?)),
None => None,
let args = TestInputArgs {
input_url,
input,
target_exe: &self.config.target_exe,
target_options: &self.config.target_options,
target_env: &self.config.target_env,
setup_dir: &self.config.common.setup_dir,
task_id: self.config.common.task_id,
job_id: self.config.common.job_id,
target_timeout: self.config.target_timeout,
check_retry_count: self.config.check_retry_count,
minimized_stack_depth: self.config.minimized_stack_depth,
};
let input_sha256 = sha256::digest_file(input).await.with_context(|| {
format_err!("unable to sha256 digest input file: {}", input.display())
})?;
let result = test_input(args).await?;
let test_report = fuzzer
.repro(
input,
self.config.target_timeout,
self.config.check_retry_count,
)
.await?;
match test_report.asan_log {
Some(asan_log) => {
let crash_report = CrashReport::new(
asan_log,
task_id,
job_id,
&self.config.target_exe,
input_blob,
input_sha256,
self.config.minimized_stack_depth,
);
Ok(CrashTestResult::CrashReport(crash_report))
}
None => {
let no_repro = NoCrash {
input_blob,
input_sha256,
executable: PathBuf::from(&self.config.target_exe),
task_id,
job_id,
tries: 1 + self.config.check_retry_count,
error: test_report.error.map(|e| format!("{}", e)),
};
Ok(CrashTestResult::NoRepro(no_repro))
}
}
Ok(result)
}
}
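
One behavioral nuance of the shared helpers: both `test_input` functions build the optional `input_blob` with `input_url.and_then(|u| BlobUrl::new(u).ok()).map(InputBlob::from)`, whereas the per-processor code they replace propagated a `BlobUrl::new` failure with `?`. A URL that fails to parse therefore now yields `input_blob = None` instead of an error. A tiny sketch of the idiom, with a string-to-integer parse standing in for `BlobUrl::new`:

// `parse` stands in for BlobUrl::new: a fallible conversion.
fn parse(u: &str) -> Result<u32, std::num::ParseIntError> {
    u.parse()
}

fn main() {
    let input_url: Option<&str> = Some("not-a-number");

    // Old shape: Some(x) => Some(parse(x)?), which propagated the error.
    // New shape: the failure collapses to None and processing continues.
    let input_blob: Option<u32> = input_url.and_then(|u| parse(u).ok());
    assert!(input_blob.is_none());
}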