local_run refactoring (#508)

## Summary of the Pull Request

This is a refactoring of the local debugging.
- The input queue has been abstracted and can now be backed by a local implementation
- The SyncDir can now sync a local directory
- Added the ability to monitor a local directory with a queue

## Reviewers Notes
The most important parts of the PR are
- The queue abstraction and implementations 
    - src/agent/storage-queue/src/azure_queue.rs
    - src/agent/storage-queue/src/local_queue.rs
    - src/agent/storage-queue/src/lib.rs
- Changes to support local files in 
    - src/agent/onefuzz/src/syncdir.rs
- Example of how those abstractions are used to link task together
    - src/agent/onefuzz-agent/src/local/libfuzzer_fuzz.rs
    - src/agent/onefuzz-agent/src/local/common.rs

## Validation Steps Performed

_How does someone test & validate?_
This commit is contained in:
Cheick Keita
2021-03-17 12:32:45 -07:00
committed by GitHub
parent 85335d462c
commit 4b07fdc455
59 changed files with 2260 additions and 958 deletions

View File

@ -13,6 +13,7 @@ integration_test=[]
anyhow = "1.0"
appinsights = "0.1"
async-trait = "0.1"
backoff = { version = "0.3", features = ["async-std"] }
clap = "2.33"
tempfile = "3.2"
env_logger = "0.8"
@ -32,6 +33,7 @@ onefuzz = { path = "../onefuzz" }
storage-queue = { path = "../storage-queue" }
reqwest-retry = { path = "../reqwest-retry" }
onefuzz-telemetry = { path = "../onefuzz-telemetry" }
path-absolutize = "3.0.6"
[dev-dependencies]
tempfile = "3.2"

View File

@ -1,24 +0,0 @@
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT License.
use anyhow::Result;
use clap::{App, SubCommand};
use crate::{debug::libfuzzer_merge, local::common::add_common_config};
const LIBFUZZER_MERGE: &str = "libfuzzer-merge";
/// Dispatches the internal debugging subcommand.
///
/// Only `libfuzzer-merge` is recognized; anything else fails with the
/// usage string.
pub async fn run(args: &clap::ArgMatches<'_>) -> Result<()> {
    if let (LIBFUZZER_MERGE, Some(sub)) = args.subcommand() {
        return libfuzzer_merge::run(sub).await;
    }
    anyhow::bail!("missing subcommand\nUSAGE: {}", args.usage());
}
/// Builds the clap `App` for the internal debugging commands, wiring in the
/// libfuzzer-merge subcommand with the common config options attached.
pub fn args(name: &str) -> App<'static, 'static> {
    let merge = add_common_config(libfuzzer_merge::args(LIBFUZZER_MERGE));
    SubCommand::with_name(name)
        .about("unsupported internal debugging commands")
        .subcommand(merge)
}

View File

@ -1,58 +0,0 @@
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT License.
use crate::{
local::common::{
add_cmd_options, build_common_config, get_cmd_arg, get_cmd_env, get_cmd_exe, CmdType,
},
tasks::merge::libfuzzer_merge::{merge_inputs, Config},
};
use anyhow::Result;
use clap::{App, Arg, SubCommand};
use onefuzz::syncdir::SyncedDir;
use std::sync::Arc;
/// Runs a local-only libfuzzer merge task.
///
/// Builds a merge `Config` from CLI arguments — `inputs` and `unique_inputs`
/// are plain local directories with no remote URL — then merges the inputs
/// directory into `unique_inputs` and prints the results.
pub async fn run(args: &clap::ArgMatches<'_>) -> Result<()> {
    let target_exe = get_cmd_exe(CmdType::Target, args)?.into();
    let target_env = get_cmd_env(CmdType::Target, args)?;
    let target_options = get_cmd_arg(CmdType::Target, args);
    let inputs = value_t!(args, "inputs", String)?;
    let unique_inputs = value_t!(args, "unique_inputs", String)?;
    // Local debugging never probes the fuzzer's -help= output.
    let check_fuzzer_help = false;
    let common = build_common_config(args)?;
    let config = Arc::new(Config {
        target_exe,
        target_env,
        target_options,
        check_fuzzer_help,
        // No remote input queue in local mode.
        input_queue: None,
        inputs: vec![SyncedDir {
            path: inputs.into(),
            url: None,
        }],
        unique_inputs: SyncedDir {
            path: unique_inputs.into(),
            url: None,
        },
        common,
        preserve_existing_outputs: true,
    });
    // Merge only the first (sole) inputs directory into unique_inputs.
    let results = merge_inputs(config.clone(), vec![config.clone().inputs[0].path.clone()]).await?;
    println!("{:#?}", results);
    Ok(())
}
/// Builds the clap `App` for the local-only libfuzzer merge task:
/// target exe/options/env plus the two positional directory arguments.
pub fn args(name: &'static str) -> App<'static, 'static> {
    let base = SubCommand::with_name(name).about("execute a local-only libfuzzer merge task");
    add_cmd_options(CmdType::Target, true, true, true, base)
        .arg(Arg::with_name("inputs").takes_value(true).required(true))
        .arg(
            Arg::with_name("unique_inputs")
                .takes_value(true)
                .required(true),
        )
}

View File

@ -1,5 +0,0 @@
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT License.
pub mod cmd;
pub mod libfuzzer_merge;

View File

@ -5,8 +5,9 @@ use anyhow::Result;
use clap::{App, SubCommand};
use crate::local::{
common::add_common_config, generic_crash_report, generic_generator, libfuzzer,
libfuzzer_coverage, libfuzzer_crash_report, libfuzzer_fuzz, radamsa,
common::add_common_config, generic_analysis, generic_crash_report, generic_generator,
libfuzzer, libfuzzer_coverage, libfuzzer_crash_report, libfuzzer_fuzz, libfuzzer_merge,
radamsa,
};
const RADAMSA: &str = "radamsa";
@ -14,8 +15,10 @@ const LIBFUZZER: &str = "libfuzzer";
const LIBFUZZER_FUZZ: &str = "libfuzzer-fuzz";
const LIBFUZZER_CRASH_REPORT: &str = "libfuzzer-crash-report";
const LIBFUZZER_COVERAGE: &str = "libfuzzer-coverage";
const LIBFUZZER_MERGE: &str = "libfuzzer-merge";
const GENERIC_CRASH_REPORT: &str = "generic-crash-report";
const GENERIC_GENERATOR: &str = "generic-generator";
const GENERIC_ANALYSIS: &str = "generic-analysis";
pub async fn run(args: &clap::ArgMatches<'_>) -> Result<()> {
match args.subcommand() {
@ -24,6 +27,8 @@ pub async fn run(args: &clap::ArgMatches<'_>) -> Result<()> {
(LIBFUZZER_FUZZ, Some(sub)) => libfuzzer_fuzz::run(sub).await,
(LIBFUZZER_COVERAGE, Some(sub)) => libfuzzer_coverage::run(sub).await,
(LIBFUZZER_CRASH_REPORT, Some(sub)) => libfuzzer_crash_report::run(sub).await,
(LIBFUZZER_MERGE, Some(sub)) => libfuzzer_merge::run(sub).await,
(GENERIC_ANALYSIS, Some(sub)) => generic_analysis::run(sub).await,
(GENERIC_CRASH_REPORT, Some(sub)) => generic_crash_report::run(sub).await,
(GENERIC_GENERATOR, Some(sub)) => generic_generator::run(sub).await,
_ => {
@ -41,6 +46,7 @@ pub fn args(name: &str) -> App<'static, 'static> {
.subcommand(add_common_config(libfuzzer_coverage::args(
LIBFUZZER_COVERAGE,
)))
.subcommand(add_common_config(libfuzzer_merge::args(LIBFUZZER_MERGE)))
.subcommand(add_common_config(libfuzzer_crash_report::args(
LIBFUZZER_CRASH_REPORT,
)))
@ -50,4 +56,5 @@ pub fn args(name: &str) -> App<'static, 'static> {
.subcommand(add_common_config(generic_generator::args(
GENERIC_GENERATOR,
)))
.subcommand(add_common_config(generic_analysis::args(GENERIC_ANALYSIS)))
}

View File

@ -2,10 +2,20 @@ use crate::tasks::config::CommonConfig;
use crate::tasks::utils::parse_key_value;
use anyhow::Result;
use clap::{App, Arg, ArgMatches};
use std::{collections::HashMap, path::PathBuf};
use onefuzz::jitter::delay_with_jitter;
use onefuzz::{blob::BlobContainerUrl, monitor::DirectoryMonitor, syncdir::SyncedDir};
use reqwest::Url;
use std::{
collections::HashMap,
path::{Path, PathBuf},
time::Duration,
};
use uuid::Uuid;
use backoff::{future::retry, Error as BackoffError, ExponentialBackoff};
use path_absolutize::Absolutize;
use std::task::Poll;
pub const SETUP_DIR: &str = "setup_dir";
pub const INPUTS_DIR: &str = "inputs_dir";
pub const CRASHES_DIR: &str = "crashes_dir";
@ -33,46 +43,30 @@ pub const GENERATOR_EXE: &str = "generator_exe";
pub const GENERATOR_ENV: &str = "generator_env";
pub const GENERATOR_OPTIONS: &str = "generator_options";
pub const ANALYZER_EXE: &str = "analyzer_exe";
pub const ANALYZER_OPTIONS: &str = "analyzer_options";
pub const ANALYZER_ENV: &str = "analyzer_env";
pub const ANALYSIS_DIR: &str = "analysis_dir";
pub const ANALYSIS_INPUTS: &str = "analysis_inputs";
pub const ANALYSIS_UNIQUE_INPUTS: &str = "analysis_unique_inputs";
pub const PRESERVE_EXISTING_OUTPUTS: &str = "preserve_existing_outputs";
const WAIT_FOR_MAX_WAIT: Duration = Duration::from_secs(10);
const WAIT_FOR_DIR_DELAY: Duration = Duration::from_secs(1);
pub enum CmdType {
Target,
Generator,
// Supervisor,
}
pub fn add_cmd_options(
cmd_type: CmdType,
exe: bool,
arg: bool,
env: bool,
mut app: App<'static, 'static>,
) -> App<'static, 'static> {
let (exe_name, env_name, arg_name) = match cmd_type {
CmdType::Target => (TARGET_EXE, TARGET_ENV, TARGET_OPTIONS),
// CmdType::Supervisor => (SUPERVISOR_EXE, SUPERVISOR_ENV, SUPERVISOR_OPTIONS),
CmdType::Generator => (GENERATOR_EXE, GENERATOR_ENV, GENERATOR_OPTIONS),
};
if exe {
app = app.arg(Arg::with_name(exe_name).takes_value(true).required(true));
pub fn get_hash_map(args: &clap::ArgMatches<'_>, name: &str) -> Result<HashMap<String, String>> {
let mut env = HashMap::new();
for opt in args.values_of_lossy(name).unwrap_or_default() {
let (k, v) = parse_key_value(opt)?;
env.insert(k, v);
}
if env {
app = app.arg(
Arg::with_name(env_name)
.long(env_name)
.takes_value(true)
.multiple(true),
)
}
if arg {
app = app.arg(
Arg::with_name(arg_name)
.long(arg_name)
.takes_value(true)
.value_delimiter(" ")
.help("Use a quoted string with space separation to denote multiple arguments"),
)
}
app
Ok(env)
}
pub fn get_cmd_exe(cmd_type: CmdType, args: &clap::ArgMatches<'_>) -> Result<String> {
@ -105,13 +99,7 @@ pub fn get_cmd_env(
// CmdType::Supervisor => SUPERVISOR_ENV,
CmdType::Generator => GENERATOR_ENV,
};
let mut env = HashMap::new();
for opt in args.values_of_lossy(env_name).unwrap_or_default() {
let (k, v) = parse_key_value(opt)?;
env.insert(k, v);
}
Ok(env)
get_hash_map(args, env_name)
}
pub fn add_common_config(app: App<'static, 'static>) -> App<'static, 'static> {
@ -142,17 +130,56 @@ pub fn add_common_config(app: App<'static, 'static>) -> App<'static, 'static> {
}
fn get_uuid(name: &str, args: &ArgMatches<'_>) -> Result<Uuid> {
match value_t!(args, name, String) {
Ok(x) => Uuid::parse_str(&x)
.map_err(|x| format_err!("invalid {}. uuid expected. {})", name, x)),
Err(_) => Ok(Uuid::nil()),
}
value_t!(args, name, String).map(|x| {
Uuid::parse_str(&x).map_err(|x| format_err!("invalid {}. uuid expected. {})", name, x))
})?
}
pub fn get_synced_dirs(
name: &str,
job_id: Uuid,
task_id: Uuid,
args: &ArgMatches<'_>,
) -> Result<Vec<SyncedDir>> {
let current_dir = std::env::current_dir()?;
let dirs: Result<Vec<SyncedDir>> = value_t!(args, name, PathBuf)?
.iter()
.enumerate()
.map(|(index, remote_path)| {
let path = PathBuf::from(remote_path);
let remote_path = path.absolutize()?;
let remote_url = Url::from_file_path(remote_path).expect("invalid file path");
let remote_blob_url = BlobContainerUrl::new(remote_url).expect("invalid url");
let path = current_dir.join(format!("{}/{}/{}_{}", job_id, task_id, name, index));
Ok(SyncedDir {
url: remote_blob_url,
path,
})
})
.collect();
Ok(dirs?)
}
/// Builds a `SyncedDir` for a single-valued directory argument: the remote
/// side is the absolutized user path exposed as a `file://` container URL,
/// and the local working copy lives under `<cwd>/<job_id>/<task_id>/<name>`.
pub fn get_synced_dir(
    name: &str,
    job_id: Uuid,
    task_id: Uuid,
    args: &ArgMatches<'_>,
) -> Result<SyncedDir> {
    let user_path = value_t!(args, name, PathBuf)?;
    let absolute = user_path.absolutize()?.into_owned();
    let remote_url = Url::from_file_path(absolute).map_err(|_| anyhow!("invalid file path"))?;
    let url = BlobContainerUrl::new(remote_url)?;
    let path = std::env::current_dir()?.join(format!("{}/{}/{}", job_id, task_id, name));
    Ok(SyncedDir { url, path })
}
pub fn build_common_config(args: &ArgMatches<'_>) -> Result<CommonConfig> {
let job_id = get_uuid("job_id", args)?;
let task_id = get_uuid("task_id", args)?;
let instance_id = get_uuid("instance_id", args)?;
let job_id = get_uuid("job_id", args).unwrap_or_else(|_| Uuid::nil());
let task_id = get_uuid("task_id", args).unwrap_or_else(|_| Uuid::new_v4());
let instance_id = get_uuid("instance_id", args).unwrap_or_else(|_| Uuid::nil());
let setup_dir = if args.is_present(SETUP_DIR) {
value_t!(args, SETUP_DIR, PathBuf)?
@ -174,3 +201,67 @@ pub fn build_common_config(args: &ArgMatches<'_>) -> Result<CommonConfig> {
};
Ok(config)
}
/// A local directory watched for new files, paired with an in-process queue.
///
/// For each new file added to `directory_path`, a notification carrying the
/// file's `file://` URL is enqueued on `queue_client`.
pub struct DirectoryMonitorQueue {
    /// The directory being monitored.
    pub directory_path: PathBuf,
    /// Queue receiving one message (the file URL) per newly observed file.
    pub queue_client: storage_queue::QueueClient,
    /// Handle to the background task doing the watching; completes when the
    /// monitor stops (or errors).
    pub handle: tokio::task::JoinHandle<Result<()>>,
}
impl DirectoryMonitorQueue {
    /// Starts watching `directory_path` in a background task.
    ///
    /// Creates an in-memory `ChannelQueueClient` (local-only queue), then
    /// spawns a task that polls a `DirectoryMonitor` and enqueues a
    /// `file://` URL for every new file until the monitor signals
    /// completion.
    pub async fn start_monitoring(directory_path: impl AsRef<Path>) -> Result<Self> {
        let directory_path = PathBuf::from(directory_path.as_ref());
        let directory_path_clone = directory_path.clone();
        let queue_client = storage_queue::QueueClient::Channel(
            storage_queue::local_queue::ChannelQueueClient::new()?,
        );
        // Clone so the spawned task owns a sender while the caller keeps
        // the original client in the returned struct.
        let queue = queue_client.clone();
        let handle: tokio::task::JoinHandle<Result<()>> = tokio::spawn(async move {
            let mut monitor = DirectoryMonitor::new(directory_path_clone.clone());
            monitor.start()?;
            loop {
                match monitor.poll_file() {
                    // A new file appeared: publish its URL on the queue.
                    Poll::Ready(Some(file_path)) => {
                        let file_url = Url::from_file_path(file_path)
                            .map_err(|_| anyhow!("invalid file path"))?;
                        queue.enqueue(file_url).await?;
                    }
                    // Monitor finished: stop watching.
                    Poll::Ready(None) => break,
                    // Nothing new yet: sleep ~1s (with jitter) and re-poll.
                    Poll::Pending => delay_with_jitter(Duration::from_secs(1)).await,
                }
            }
            Ok(())
        });
        Ok(DirectoryMonitorQueue {
            directory_path,
            queue_client,
            handle,
        })
    }
}
/// Waits for `path` to exist on disk, retrying with exponential backoff.
///
/// Probes are spaced at most `WAIT_FOR_DIR_DELAY` apart and the whole wait
/// gives up after `WAIT_FOR_MAX_WAIT`, returning the last "does not exist"
/// error.
pub async fn wait_for_dir(path: impl AsRef<Path>) -> Result<()> {
    let op = || async {
        if path.as_ref().exists() {
            Ok(())
        } else {
            // Transient: the backoff policy keeps retrying until it expires.
            // (Also fixes the "exisit" typo in the error message.)
            Err(BackoffError::Transient(anyhow::anyhow!(
                "path '{:?}' does not exist",
                path.as_ref()
            )))
        }
    };
    retry(
        ExponentialBackoff {
            max_elapsed_time: Some(WAIT_FOR_MAX_WAIT),
            max_interval: WAIT_FOR_DIR_DELAY,
            ..ExponentialBackoff::default()
        },
        op,
    )
    .await
}

View File

@ -0,0 +1,104 @@
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT License.
use crate::{
local::common::{
build_common_config, get_cmd_arg, get_cmd_exe, get_hash_map, get_synced_dir, CmdType,
ANALYSIS_DIR, ANALYZER_ENV, ANALYZER_EXE, ANALYZER_OPTIONS, CRASHES_DIR, NO_REPRO_DIR,
REPORTS_DIR, TARGET_ENV, TARGET_EXE, TARGET_OPTIONS, TOOLS_DIR, UNIQUE_REPORTS_DIR,
},
tasks::{
analysis::generic::{run as run_analysis, Config},
config::CommonConfig,
},
};
use anyhow::Result;
use clap::{App, Arg, SubCommand};
use storage_queue::QueueClient;
/// Assembles a generic-analysis `Config` from CLI arguments.
///
/// Required directories (analysis, tools) propagate their errors; optional
/// ones (crashes, reports, no_repro, unique_reports) degrade to `None` via
/// `.ok()` when the argument is absent.
pub fn build_analysis_config(
    args: &clap::ArgMatches<'_>,
    input_queue: Option<QueueClient>,
    common: CommonConfig,
) -> Result<Config> {
    let (job_id, task_id) = (common.job_id, common.task_id);

    Ok(Config {
        target_exe: get_cmd_exe(CmdType::Target, args)?.into(),
        target_options: get_cmd_arg(CmdType::Target, args),
        crashes: get_synced_dir(CRASHES_DIR, job_id, task_id, args).ok(),
        input_queue,
        analyzer_exe: value_t!(args, ANALYZER_EXE, String)?,
        analyzer_options: args.values_of_lossy(ANALYZER_OPTIONS).unwrap_or_default(),
        analyzer_env: get_hash_map(args, ANALYZER_ENV)?,
        analysis: get_synced_dir(ANALYSIS_DIR, job_id, task_id, args)?,
        tools: get_synced_dir(TOOLS_DIR, job_id, task_id, args)?,
        reports: get_synced_dir(REPORTS_DIR, job_id, task_id, args).ok(),
        unique_reports: get_synced_dir(UNIQUE_REPORTS_DIR, job_id, task_id, args).ok(),
        no_repro: get_synced_dir(NO_REPRO_DIR, job_id, task_id, args).ok(),
        common,
    })
}
/// Entry point for the local-only generic analysis subcommand: builds the
/// common and analysis configs (no input queue) and runs the analysis.
pub async fn run(args: &clap::ArgMatches<'_>) -> Result<()> {
    let common = build_common_config(args)?;
    run_analysis(build_analysis_config(args, None, common)?).await
}
/// CLI arguments shared by the standalone analysis subcommand and the
/// combined local fuzz-and-analyze flow.
pub fn build_shared_args() -> Vec<Arg<'static, 'static>> {
    let mut shared = Vec::new();
    shared.push(
        Arg::with_name(TARGET_EXE)
            .long(TARGET_EXE)
            .takes_value(true)
            .required(true),
    );
    shared.push(
        Arg::with_name(TARGET_ENV)
            .long(TARGET_ENV)
            .takes_value(true)
            .multiple(true),
    );
    shared.push(
        Arg::with_name(TARGET_OPTIONS)
            .default_value("{input}")
            .long(TARGET_OPTIONS)
            .takes_value(true)
            .value_delimiter(" ")
            .help("Use a quoted string with space separation to denote multiple arguments"),
    );
    shared.push(
        Arg::with_name(CRASHES_DIR)
            .long(CRASHES_DIR)
            .takes_value(true)
            .required(true),
    );
    shared.push(
        Arg::with_name(ANALYZER_EXE)
            .takes_value(true)
            .required(true),
    );
    shared.push(
        Arg::with_name(ANALYZER_OPTIONS)
            .takes_value(true)
            .value_delimiter(" ")
            .help("Use a quoted string with space separation to denote multiple arguments"),
    );
    shared.push(
        Arg::with_name(ANALYZER_ENV)
            .takes_value(true)
            .multiple(true),
    );
    shared.push(
        Arg::with_name(ANALYSIS_DIR)
            .takes_value(true)
            .required(true),
    );
    shared.push(Arg::with_name(TOOLS_DIR).takes_value(true).required(false));
    shared
}
/// Builds the clap subcommand for the local-only generic analysis task.
pub fn args(name: &'static str) -> App<'static, 'static> {
    let cmd = SubCommand::with_name(name).about("execute a local-only generic analysis");
    cmd.args(&build_shared_args())
}

View File

@ -3,33 +3,43 @@
use crate::{
local::common::{
build_common_config, get_cmd_arg, get_cmd_env, get_cmd_exe, CmdType, CHECK_ASAN_LOG,
CHECK_RETRY_COUNT, CRASHES_DIR, DISABLE_CHECK_QUEUE, NO_REPRO_DIR, REPORTS_DIR, TARGET_ENV,
TARGET_EXE, TARGET_OPTIONS, TARGET_TIMEOUT, UNIQUE_REPORTS_DIR,
build_common_config, get_cmd_arg, get_cmd_env, get_cmd_exe, get_synced_dir, CmdType,
CHECK_ASAN_LOG, CHECK_RETRY_COUNT, CRASHES_DIR, DISABLE_CHECK_QUEUE, NO_REPRO_DIR,
REPORTS_DIR, TARGET_ENV, TARGET_EXE, TARGET_OPTIONS, TARGET_TIMEOUT, UNIQUE_REPORTS_DIR,
},
tasks::{
config::CommonConfig,
report::generic::{Config, ReportTask},
},
tasks::report::generic::{Config, ReportTask},
};
use anyhow::Result;
use clap::{App, Arg, SubCommand};
use std::path::PathBuf;
use storage_queue::QueueClient;
pub fn build_report_config(args: &clap::ArgMatches<'_>) -> Result<Config> {
pub fn build_report_config(
args: &clap::ArgMatches<'_>,
input_queue: Option<QueueClient>,
common: CommonConfig,
) -> Result<Config> {
let target_exe = get_cmd_exe(CmdType::Target, args)?.into();
let target_env = get_cmd_env(CmdType::Target, args)?;
let target_options = get_cmd_arg(CmdType::Target, args);
let crashes = Some(value_t!(args, CRASHES_DIR, PathBuf)?.into());
let reports = if args.is_present(REPORTS_DIR) {
Some(value_t!(args, REPORTS_DIR, PathBuf)?).map(|x| x.into())
} else {
None
};
let no_repro = if args.is_present(NO_REPRO_DIR) {
Some(value_t!(args, NO_REPRO_DIR, PathBuf)?).map(|x| x.into())
} else {
None
};
let unique_reports = Some(value_t!(args, UNIQUE_REPORTS_DIR, PathBuf)?.into());
let crashes = Some(get_synced_dir(
CRASHES_DIR,
common.job_id,
common.task_id,
args,
)?);
let reports = get_synced_dir(REPORTS_DIR, common.job_id, common.task_id, args).ok();
let no_repro = get_synced_dir(NO_REPRO_DIR, common.job_id, common.task_id, args).ok();
let unique_reports = Some(get_synced_dir(
UNIQUE_REPORTS_DIR,
common.job_id,
common.task_id,
args,
)?);
let target_timeout = value_t!(args, TARGET_TIMEOUT, u64).ok();
@ -38,8 +48,6 @@ pub fn build_report_config(args: &clap::ArgMatches<'_>) -> Result<Config> {
let check_asan_log = args.is_present(CHECK_ASAN_LOG);
let check_debugger = !args.is_present("disable_check_debugger");
let common = build_common_config(args)?;
let config = Config {
target_exe,
target_env,
@ -50,7 +58,7 @@ pub fn build_report_config(args: &clap::ArgMatches<'_>) -> Result<Config> {
check_retry_count,
check_queue,
crashes,
input_queue: None,
input_queue,
no_repro,
reports,
unique_reports,
@ -61,8 +69,9 @@ pub fn build_report_config(args: &clap::ArgMatches<'_>) -> Result<Config> {
}
pub async fn run(args: &clap::ArgMatches<'_>) -> Result<()> {
let config = build_report_config(args)?;
ReportTask::new(config).local_run().await
let common = build_common_config(args)?;
let config = build_report_config(args, None, common)?;
ReportTask::new(config).managed_run().await
}
pub fn build_shared_args() -> Vec<Arg<'static, 'static>> {

View File

@ -3,19 +3,21 @@
use crate::{
local::common::{
build_common_config, get_cmd_arg, get_cmd_env, get_cmd_exe, CmdType, CHECK_ASAN_LOG,
CHECK_RETRY_COUNT, CRASHES_DIR, GENERATOR_ENV, GENERATOR_EXE, GENERATOR_OPTIONS,
READONLY_INPUTS, RENAME_OUTPUT, TARGET_ENV, TARGET_EXE, TARGET_OPTIONS, TARGET_TIMEOUT,
TOOLS_DIR,
build_common_config, get_cmd_arg, get_cmd_env, get_cmd_exe, get_synced_dir,
get_synced_dirs, CmdType, CHECK_ASAN_LOG, CHECK_RETRY_COUNT, CRASHES_DIR, GENERATOR_ENV,
GENERATOR_EXE, GENERATOR_OPTIONS, READONLY_INPUTS, RENAME_OUTPUT, TARGET_ENV, TARGET_EXE,
TARGET_OPTIONS, TARGET_TIMEOUT, TOOLS_DIR,
},
tasks::{
config::CommonConfig,
fuzz::generator::{Config, GeneratorTask},
},
tasks::fuzz::generator::{Config, GeneratorTask},
};
use anyhow::Result;
use clap::{App, Arg, SubCommand};
use std::path::PathBuf;
pub fn build_fuzz_config(args: &clap::ArgMatches<'_>) -> Result<Config> {
let crashes = value_t!(args, CRASHES_DIR, PathBuf)?.into();
pub fn build_fuzz_config(args: &clap::ArgMatches<'_>, common: CommonConfig) -> Result<Config> {
let crashes = get_synced_dir(CRASHES_DIR, common.job_id, common.task_id, args)?;
let target_exe = get_cmd_exe(CmdType::Target, args)?.into();
let target_options = get_cmd_arg(CmdType::Target, args);
let target_env = get_cmd_env(CmdType::Target, args)?;
@ -23,11 +25,7 @@ pub fn build_fuzz_config(args: &clap::ArgMatches<'_>) -> Result<Config> {
let generator_exe = get_cmd_exe(CmdType::Generator, args)?;
let generator_options = get_cmd_arg(CmdType::Generator, args);
let generator_env = get_cmd_env(CmdType::Generator, args)?;
let readonly_inputs = values_t!(args, READONLY_INPUTS, PathBuf)?
.iter()
.map(|x| x.to_owned().into())
.collect();
let readonly_inputs = get_synced_dirs(READONLY_INPUTS, common.job_id, common.task_id, args)?;
let rename_output = args.is_present(RENAME_OUTPUT);
let check_asan_log = args.is_present(CHECK_ASAN_LOG);
@ -35,14 +33,10 @@ pub fn build_fuzz_config(args: &clap::ArgMatches<'_>) -> Result<Config> {
let check_retry_count = value_t!(args, CHECK_RETRY_COUNT, u64)?;
let target_timeout = Some(value_t!(args, TARGET_TIMEOUT, u64)?);
let tools = if args.is_present(TOOLS_DIR) {
Some(value_t!(args, TOOLS_DIR, PathBuf)?.into())
} else {
None
};
let tools = get_synced_dir(TOOLS_DIR, common.job_id, common.task_id, args).ok();
let ensemble_sync_delay = None;
let common = build_common_config(args)?;
let config = Config {
tools,
generator_exe,
@ -66,7 +60,8 @@ pub fn build_fuzz_config(args: &clap::ArgMatches<'_>) -> Result<Config> {
}
pub async fn run(args: &clap::ArgMatches<'_>) -> Result<()> {
let config = build_fuzz_config(args)?;
let common = build_common_config(args)?;
let config = build_fuzz_config(args, common)?;
GeneratorTask::new(config).run().await
}

View File

@ -3,46 +3,102 @@
use crate::{
local::{
common::COVERAGE_DIR,
common::{
build_common_config, wait_for_dir, DirectoryMonitorQueue, ANALYZER_EXE, COVERAGE_DIR,
UNIQUE_REPORTS_DIR,
},
generic_analysis::build_analysis_config,
libfuzzer_coverage::{build_coverage_config, build_shared_args as build_coverage_args},
libfuzzer_crash_report::{build_report_config, build_shared_args as build_crash_args},
libfuzzer_fuzz::{build_fuzz_config, build_shared_args as build_fuzz_args},
},
tasks::{
analysis::generic::run as run_analysis, config::CommonConfig,
coverage::libfuzzer_coverage::CoverageTask, fuzz::libfuzzer_fuzz::LibFuzzerFuzzTask,
report::libfuzzer_report::ReportTask,
},
};
use anyhow::Result;
use clap::{App, SubCommand};
use onefuzz::utils::try_wait_all_join_handles;
use std::collections::HashSet;
use tokio::task::spawn;
use uuid::Uuid;
pub async fn run(args: &clap::ArgMatches<'_>) -> Result<()> {
let fuzz_config = build_fuzz_config(args)?;
let common = build_common_config(args)?;
let fuzz_config = build_fuzz_config(args, common.clone())?;
let crash_dir = fuzz_config
.crashes
.url
.as_file_path()
.expect("invalid crash dir remote location");
let fuzzer = LibFuzzerFuzzTask::new(fuzz_config)?;
fuzzer.check_libfuzzer().await?;
let fuzz_task = spawn(async move { fuzzer.run().await });
let mut task_handles = vec![];
let report_config = build_report_config(args)?;
let report = ReportTask::new(report_config);
let report_task = spawn(async move { report.local_run().await });
let fuzz_task = spawn(async move { fuzzer.managed_run().await });
wait_for_dir(&crash_dir).await?;
task_handles.push(fuzz_task);
if args.is_present(UNIQUE_REPORTS_DIR) {
let crash_report_input_monitor =
DirectoryMonitorQueue::start_monitoring(crash_dir.clone()).await?;
let report_config = build_report_config(
args,
Some(crash_report_input_monitor.queue_client),
CommonConfig {
task_id: Uuid::new_v4(),
..common.clone()
},
)?;
let mut report = ReportTask::new(report_config);
let report_task = spawn(async move { report.managed_run().await });
task_handles.push(report_task);
task_handles.push(crash_report_input_monitor.handle);
}
if args.is_present(COVERAGE_DIR) {
let coverage_config = build_coverage_config(args, true)?;
let coverage = CoverageTask::new(coverage_config);
let coverage_task = spawn(async move { coverage.local_run().await });
let coverage_input_monitor =
DirectoryMonitorQueue::start_monitoring(crash_dir.clone()).await?;
let coverage_config = build_coverage_config(
args,
true,
Some(coverage_input_monitor.queue_client),
CommonConfig {
task_id: Uuid::new_v4(),
..common.clone()
},
)?;
let mut coverage = CoverageTask::new(coverage_config);
let coverage_task = spawn(async move { coverage.managed_run().await });
let result = tokio::try_join!(fuzz_task, report_task, coverage_task)?;
result.0?;
result.1?;
result.2?;
} else {
let result = tokio::try_join!(fuzz_task, report_task)?;
result.0?;
result.1?;
task_handles.push(coverage_task);
task_handles.push(coverage_input_monitor.handle);
}
if args.is_present(ANALYZER_EXE) {
let analysis_input_monitor = DirectoryMonitorQueue::start_monitoring(crash_dir).await?;
let analysis_config = build_analysis_config(
args,
Some(analysis_input_monitor.queue_client),
CommonConfig {
task_id: Uuid::new_v4(),
..common
},
)?;
let analysis_task = spawn(async move { run_analysis(analysis_config).await });
task_handles.push(analysis_task);
task_handles.push(analysis_input_monitor.handle);
}
try_wait_all_join_handles(task_handles).await?;
Ok(())
}

View File

@ -3,39 +3,49 @@
use crate::{
local::common::{
build_common_config, get_cmd_arg, get_cmd_env, get_cmd_exe, CmdType, CHECK_FUZZER_HELP,
COVERAGE_DIR, INPUTS_DIR, READONLY_INPUTS, TARGET_ENV, TARGET_EXE, TARGET_OPTIONS,
build_common_config, get_cmd_arg, get_cmd_env, get_cmd_exe, get_synced_dir,
get_synced_dirs, CmdType, CHECK_FUZZER_HELP, COVERAGE_DIR, INPUTS_DIR, READONLY_INPUTS,
TARGET_ENV, TARGET_EXE, TARGET_OPTIONS,
},
tasks::{
config::CommonConfig,
coverage::libfuzzer_coverage::{Config, CoverageTask},
},
tasks::coverage::libfuzzer_coverage::{Config, CoverageTask},
};
use anyhow::Result;
use clap::{App, Arg, SubCommand};
use std::path::PathBuf;
use storage_queue::QueueClient;
pub fn build_coverage_config(args: &clap::ArgMatches<'_>, local_job: bool) -> Result<Config> {
pub fn build_coverage_config(
args: &clap::ArgMatches<'_>,
local_job: bool,
input_queue: Option<QueueClient>,
common: CommonConfig,
) -> Result<Config> {
let target_exe = get_cmd_exe(CmdType::Target, args)?.into();
let target_env = get_cmd_env(CmdType::Target, args)?;
let target_options = get_cmd_arg(CmdType::Target, args);
let readonly_inputs = if local_job {
vec![value_t!(args, INPUTS_DIR, PathBuf)?.into()]
vec![get_synced_dir(
INPUTS_DIR,
common.job_id,
common.task_id,
args,
)?]
} else {
values_t!(args, READONLY_INPUTS, PathBuf)?
.iter()
.map(|x| x.to_owned().into())
.collect()
get_synced_dirs(READONLY_INPUTS, common.job_id, common.task_id, args)?
};
let coverage = value_t!(args, COVERAGE_DIR, PathBuf)?.into();
let coverage = get_synced_dir(COVERAGE_DIR, common.job_id, common.task_id, args)?;
let check_fuzzer_help = args.is_present(CHECK_FUZZER_HELP);
let common = build_common_config(args)?;
let config = Config {
target_exe,
target_env,
target_options,
check_fuzzer_help,
input_queue: None,
input_queue,
readonly_inputs,
coverage,
common,
@ -45,10 +55,11 @@ pub fn build_coverage_config(args: &clap::ArgMatches<'_>, local_job: bool) -> Re
}
pub async fn run(args: &clap::ArgMatches<'_>) -> Result<()> {
let config = build_coverage_config(args, false)?;
let common = build_common_config(args)?;
let config = build_coverage_config(args, false, None, common)?;
let task = CoverageTask::new(config);
task.local_run().await
let mut task = CoverageTask::new(config);
task.managed_run().await
}
pub fn build_shared_args(local_job: bool) -> Vec<Arg<'static, 'static>> {

View File

@ -3,41 +3,46 @@
use crate::{
local::common::{
build_common_config, get_cmd_arg, get_cmd_env, get_cmd_exe, CmdType, CHECK_FUZZER_HELP,
CHECK_RETRY_COUNT, CRASHES_DIR, DISABLE_CHECK_QUEUE, NO_REPRO_DIR, REPORTS_DIR, TARGET_ENV,
TARGET_EXE, TARGET_OPTIONS, TARGET_TIMEOUT, UNIQUE_REPORTS_DIR,
build_common_config, get_cmd_arg, get_cmd_env, get_cmd_exe, get_synced_dir, CmdType,
CHECK_FUZZER_HELP, CHECK_RETRY_COUNT, CRASHES_DIR, DISABLE_CHECK_QUEUE, NO_REPRO_DIR,
REPORTS_DIR, TARGET_ENV, TARGET_EXE, TARGET_OPTIONS, TARGET_TIMEOUT, UNIQUE_REPORTS_DIR,
},
tasks::{
config::CommonConfig,
report::libfuzzer_report::{Config, ReportTask},
},
tasks::report::libfuzzer_report::{Config, ReportTask},
};
use anyhow::Result;
use clap::{App, Arg, SubCommand};
use std::path::PathBuf;
use storage_queue::QueueClient;
pub fn build_report_config(args: &clap::ArgMatches<'_>) -> Result<Config> {
pub fn build_report_config(
args: &clap::ArgMatches<'_>,
input_queue: Option<QueueClient>,
common: CommonConfig,
) -> Result<Config> {
let target_exe = get_cmd_exe(CmdType::Target, args)?.into();
let target_env = get_cmd_env(CmdType::Target, args)?;
let target_options = get_cmd_arg(CmdType::Target, args);
let crashes = Some(value_t!(args, CRASHES_DIR, PathBuf)?.into());
let reports = if args.is_present(REPORTS_DIR) {
Some(value_t!(args, REPORTS_DIR, PathBuf)?).map(|x| x.into())
} else {
None
};
let no_repro = if args.is_present(NO_REPRO_DIR) {
Some(value_t!(args, NO_REPRO_DIR, PathBuf)?).map(|x| x.into())
} else {
None
};
let unique_reports = Some(value_t!(args, UNIQUE_REPORTS_DIR, PathBuf)?.into());
let crashes = get_synced_dir(CRASHES_DIR, common.job_id, common.task_id, args).ok();
let reports = get_synced_dir(REPORTS_DIR, common.job_id, common.task_id, args).ok();
let no_repro = get_synced_dir(NO_REPRO_DIR, common.job_id, common.task_id, args).ok();
let unique_reports =
get_synced_dir(UNIQUE_REPORTS_DIR, common.job_id, common.task_id, args).ok();
let target_timeout = value_t!(args, TARGET_TIMEOUT, u64).ok();
let check_retry_count = value_t!(args, CHECK_RETRY_COUNT, u64)?;
let check_queue = !args.is_present(DISABLE_CHECK_QUEUE);
let check_fuzzer_help = args.is_present(CHECK_FUZZER_HELP);
let common = build_common_config(args)?;
let crashes = if input_queue.is_none() { crashes } else { None };
let config = Config {
target_exe,
target_env,
@ -45,7 +50,7 @@ pub fn build_report_config(args: &clap::ArgMatches<'_>) -> Result<Config> {
target_timeout,
check_retry_count,
check_fuzzer_help,
input_queue: None,
input_queue,
check_queue,
crashes,
reports,
@ -57,8 +62,9 @@ pub fn build_report_config(args: &clap::ArgMatches<'_>) -> Result<Config> {
}
pub async fn run(args: &clap::ArgMatches<'_>) -> Result<()> {
let config = build_report_config(args)?;
ReportTask::new(config).local_run().await
let common = build_common_config(args)?;
let config = build_report_config(args, None, common)?;
ReportTask::new(config).managed_run().await
}
pub fn build_shared_args() -> Vec<Arg<'static, 'static>> {

View File

@ -3,20 +3,23 @@
use crate::{
local::common::{
build_common_config, get_cmd_arg, get_cmd_env, get_cmd_exe, CmdType, CHECK_FUZZER_HELP,
CRASHES_DIR, INPUTS_DIR, TARGET_ENV, TARGET_EXE, TARGET_OPTIONS, TARGET_WORKERS,
build_common_config, get_cmd_arg, get_cmd_env, get_cmd_exe, get_synced_dir, CmdType,
CHECK_FUZZER_HELP, CRASHES_DIR, INPUTS_DIR, TARGET_ENV, TARGET_EXE, TARGET_OPTIONS,
TARGET_WORKERS,
},
tasks::{
config::CommonConfig,
fuzz::libfuzzer_fuzz::{Config, LibFuzzerFuzzTask},
},
tasks::fuzz::libfuzzer_fuzz::{Config, LibFuzzerFuzzTask},
};
use anyhow::Result;
use clap::{App, Arg, SubCommand};
use std::path::PathBuf;
const DISABLE_EXPECT_CRASH_ON_FAILURE: &str = "disable_expect_crash_on_failure";
pub fn build_fuzz_config(args: &clap::ArgMatches<'_>) -> Result<Config> {
let crashes = value_t!(args, CRASHES_DIR, PathBuf)?.into();
let inputs = value_t!(args, INPUTS_DIR, PathBuf)?.into();
pub fn build_fuzz_config(args: &clap::ArgMatches<'_>, common: CommonConfig) -> Result<Config> {
let crashes = get_synced_dir(CRASHES_DIR, common.job_id, common.task_id, args)?;
let inputs = get_synced_dir(INPUTS_DIR, common.job_id, common.task_id, args)?;
let target_exe = get_cmd_exe(CmdType::Target, args)?.into();
let target_env = get_cmd_env(CmdType::Target, args)?;
@ -28,7 +31,7 @@ pub fn build_fuzz_config(args: &clap::ArgMatches<'_>) -> Result<Config> {
let expect_crash_on_failure = !args.is_present(DISABLE_EXPECT_CRASH_ON_FAILURE);
let ensemble_sync_delay = None;
let common = build_common_config(args)?;
let config = Config {
inputs,
readonly_inputs,
@ -47,7 +50,8 @@ pub fn build_fuzz_config(args: &clap::ArgMatches<'_>) -> Result<Config> {
}
pub async fn run(args: &clap::ArgMatches<'_>) -> Result<()> {
let config = build_fuzz_config(args)?;
let common = build_common_config(args)?;
let config = build_fuzz_config(args, common)?;
LibFuzzerFuzzTask::new(config)?.run().await
}

View File

@ -0,0 +1,82 @@
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT License.
use crate::{
local::common::{
build_common_config, get_cmd_arg, get_cmd_env, get_cmd_exe, get_synced_dir,
get_synced_dirs, CmdType, ANALYSIS_INPUTS, ANALYSIS_UNIQUE_INPUTS, CHECK_FUZZER_HELP,
INPUTS_DIR, PRESERVE_EXISTING_OUTPUTS, TARGET_ENV, TARGET_EXE, TARGET_OPTIONS,
},
tasks::{
config::CommonConfig,
merge::libfuzzer_merge::{spawn, Config},
},
};
use anyhow::Result;
use clap::{App, Arg, SubCommand};
use storage_queue::QueueClient;
pub fn build_merge_config(
args: &clap::ArgMatches<'_>,
input_queue: Option<QueueClient>,
common: CommonConfig,
) -> Result<Config> {
let target_exe = get_cmd_exe(CmdType::Target, args)?.into();
let target_env = get_cmd_env(CmdType::Target, args)?;
let target_options = get_cmd_arg(CmdType::Target, args);
let check_fuzzer_help = args.is_present(CHECK_FUZZER_HELP);
let inputs = get_synced_dirs(ANALYSIS_INPUTS, common.job_id, common.task_id, args)?;
let unique_inputs =
get_synced_dir(ANALYSIS_UNIQUE_INPUTS, common.job_id, common.task_id, args)?;
let preserve_existing_outputs = value_t!(args, PRESERVE_EXISTING_OUTPUTS, bool)?;
let config = Config {
target_exe,
target_env,
target_options,
check_fuzzer_help,
input_queue,
common,
inputs,
unique_inputs,
preserve_existing_outputs,
};
Ok(config)
}
pub async fn run(args: &clap::ArgMatches<'_>) -> Result<()> {
let common = build_common_config(args)?;
let config = build_merge_config(args, None, common)?;
spawn(std::sync::Arc::new(config)).await
}
pub fn build_shared_args() -> Vec<Arg<'static, 'static>> {
vec![
Arg::with_name(TARGET_EXE)
.long(TARGET_EXE)
.takes_value(true)
.required(true),
Arg::with_name(TARGET_ENV)
.long(TARGET_ENV)
.takes_value(true)
.multiple(true),
Arg::with_name(TARGET_OPTIONS)
.long(TARGET_OPTIONS)
.takes_value(true)
.value_delimiter(" ")
.help("Use a quoted string with space separation to denote multiple arguments"),
Arg::with_name(CHECK_FUZZER_HELP)
.takes_value(false)
.long(CHECK_FUZZER_HELP),
Arg::with_name(INPUTS_DIR)
.long(INPUTS_DIR)
.takes_value(true)
.multiple(true),
]
}
pub fn args(name: &'static str) -> App<'static, 'static> {
SubCommand::with_name(name)
.about("execute a local-only libfuzzer crash report task")
.args(&build_shared_args())
}

View File

@ -3,10 +3,12 @@
pub mod cmd;
pub mod common;
pub mod generic_analysis;
pub mod generic_crash_report;
pub mod generic_generator;
pub mod libfuzzer;
pub mod libfuzzer_coverage;
pub mod libfuzzer_crash_report;
pub mod libfuzzer_fuzz;
pub mod libfuzzer_merge;
pub mod radamsa;

View File

@ -3,28 +3,48 @@
use crate::{
local::{
common::{build_common_config, DirectoryMonitorQueue},
generic_crash_report::{build_report_config, build_shared_args as build_crash_args},
generic_generator::{build_fuzz_config, build_shared_args as build_fuzz_args},
},
tasks::{fuzz::generator::GeneratorTask, report::generic::ReportTask},
tasks::{config::CommonConfig, fuzz::generator::GeneratorTask, report::generic::ReportTask},
};
use anyhow::Result;
use clap::{App, SubCommand};
use onefuzz::utils::try_wait_all_join_handles;
use std::collections::HashSet;
use tokio::task::spawn;
use uuid::Uuid;
pub async fn run(args: &clap::ArgMatches<'_>) -> Result<()> {
let fuzz_config = build_fuzz_config(args)?;
let common = build_common_config(args)?;
let fuzz_config = build_fuzz_config(args, common.clone())?;
let crash_dir = fuzz_config
.crashes
.url
.as_file_path()
.expect("invalid crash dir remote location");
let fuzzer = GeneratorTask::new(fuzz_config);
let fuzz_task = spawn(async move { fuzzer.run().await });
let report_config = build_report_config(args)?;
let report = ReportTask::new(report_config);
let report_task = spawn(async move { report.local_run().await });
let crash_report_input_monitor = DirectoryMonitorQueue::start_monitoring(crash_dir).await?;
let report_config = build_report_config(
args,
Some(crash_report_input_monitor.queue_client),
CommonConfig {
task_id: Uuid::new_v4(),
..common
},
)?;
let report_task = spawn(async move { ReportTask::new(report_config).managed_run().await });
let result = tokio::try_join!(fuzz_task, report_task)?;
result.0?;
result.1?;
try_wait_all_join_handles(vec![
fuzz_task,
report_task,
crash_report_input_monitor.handle,
])
.await?;
Ok(())
}

View File

@ -13,14 +13,12 @@ use anyhow::Result;
use clap::{App, ArgMatches, SubCommand};
use std::io::{stdout, Write};
mod debug;
mod local;
mod managed;
mod tasks;
const LICENSE_CMD: &str = "licenses";
const LOCAL_CMD: &str = "local";
const DEBUG_CMD: &str = "debug";
const MANAGED_CMD: &str = "managed";
fn main() -> Result<()> {
@ -37,7 +35,6 @@ fn main() -> Result<()> {
.version(built_version.as_str())
.subcommand(managed::cmd::args(MANAGED_CMD))
.subcommand(local::cmd::args(LOCAL_CMD))
.subcommand(debug::cmd::args(DEBUG_CMD))
.subcommand(SubCommand::with_name(LICENSE_CMD).about("display third-party licenses"));
let matches = app.get_matches();
@ -49,7 +46,6 @@ fn main() -> Result<()> {
async fn run(args: ArgMatches<'_>) -> Result<()> {
match args.subcommand() {
(LICENSE_CMD, Some(_)) => licenses(),
(DEBUG_CMD, Some(sub)) => debug::cmd::run(sub).await,
(LOCAL_CMD, Some(sub)) => local::cmd::run(sub).await,
(MANAGED_CMD, Some(sub)) => managed::cmd::run(sub).await,
_ => {

View File

@ -6,12 +6,14 @@ use crate::tasks::{
};
use anyhow::{Context, Result};
use futures::stream::StreamExt;
use onefuzz::{az_copy, blob::url::BlobUrl};
use onefuzz::{az_copy, blob::url::BlobUrl, fs::SyncPath};
use onefuzz::{
expand::Expand, fs::set_executable, fs::OwnedDir, jitter::delay_with_jitter,
process::monitor_process, syncdir::SyncedDir,
expand::Expand,
fs::{copy, set_executable, OwnedDir},
jitter::delay_with_jitter,
process::monitor_process,
syncdir::SyncedDir,
};
use reqwest::Url;
use serde::Deserialize;
use std::process::Stdio;
use std::{
@ -31,7 +33,7 @@ pub struct Config {
pub target_exe: PathBuf,
pub target_options: Vec<String>,
pub input_queue: Option<Url>,
pub input_queue: Option<QueueClient>,
pub crashes: Option<SyncedDir>,
pub analysis: SyncedDir,
@ -45,7 +47,7 @@ pub struct Config {
pub common: CommonConfig,
}
pub async fn spawn(config: Config) -> Result<()> {
pub async fn run(config: Config) -> Result<()> {
let tmp_dir = PathBuf::from(format!("./{}/tmp", config.common.task_id));
let tmp = OwnedDir::new(tmp_dir);
tmp.reset().await?;
@ -120,9 +122,8 @@ async fn run_existing(config: &Config, reports_dir: &Option<PathBuf>) -> Result<
async fn already_checked(config: &Config, input: &BlobUrl) -> Result<bool> {
let result = if let Some(crashes) = &config.crashes {
let url = crashes.try_url()?;
url.account() == input.account()
&& url.container() == input.container()
crashes.url.account() == input.account()
&& crashes.url.container() == input.container()
&& crashes.path.join(input.name()).exists()
} else {
false
@ -137,13 +138,13 @@ async fn poll_inputs(
reports_dir: &Option<PathBuf>,
) -> Result<()> {
let heartbeat = config.common.init_heartbeat().await?;
if let Some(queue) = &config.input_queue {
let mut input_queue = QueueClient::new(queue.clone());
if let Some(input_queue) = &config.input_queue {
loop {
heartbeat.alive();
if let Some(message) = input_queue.pop().await? {
let input_url = match BlobUrl::parse(str::from_utf8(message.data())?) {
let input_url = message.parse(|data| BlobUrl::parse(str::from_utf8(data)?));
let input_url = match input_url {
Ok(url) => url,
Err(err) => {
error!("could not parse input URL from queue message: {}", err);
@ -152,15 +153,12 @@ async fn poll_inputs(
};
if !already_checked(&config, &input_url).await? {
let file_name = input_url.name();
let mut destination_path = PathBuf::from(tmp_dir.path());
destination_path.push(file_name);
az_copy::copy(input_url.url().as_ref(), &destination_path, false).await?;
let destination_path = _copy(input_url, &tmp_dir).await?;
run_tool(destination_path, &config, &reports_dir).await?;
config.analysis.sync_push().await?
}
input_queue.delete(message).await?;
message.delete().await?;
} else {
warn!("no new candidate inputs found, sleeping");
delay_with_jitter(EMPTY_QUEUE_DELAY).await;
@ -171,6 +169,26 @@ async fn poll_inputs(
Ok(())
}
async fn _copy(input_url: BlobUrl, destination_folder: &OwnedDir) -> Result<PathBuf> {
let file_name = input_url.name();
let mut destination_path = PathBuf::from(destination_folder.path());
destination_path.push(file_name);
match input_url {
BlobUrl::AzureBlob(input_url) => {
az_copy::copy(input_url.as_ref(), destination_path.clone(), false).await?
}
BlobUrl::LocalFile(path) => {
copy(
SyncPath::file(path),
SyncPath::dir(destination_path.clone()),
false,
)
.await?
}
}
Ok(destination_path)
}
pub async fn run_tool(
input: impl AsRef<Path>,
config: &Config,
@ -197,13 +215,13 @@ pub async fn run_tool(
tester.reports_dir(&reports_dir)
})
.set_optional_ref(&config.crashes, |tester, crashes| {
if let Some(url) = &crashes.url {
tester
.crashes_account(&url.account())
.crashes_container(&url.container())
} else {
tester
}
tester
.set_optional_ref(&crashes.url.account(), |tester, account| {
tester.crashes_account(account)
})
.set_optional_ref(&crashes.url.container(), |tester, container| {
tester.crashes_container(container)
})
});
let analyzer_path = expand.evaluate_value(&config.analyzer_exe)?;

View File

@ -184,7 +184,7 @@ impl Config {
.await
}
Config::LibFuzzerMerge(config) => merge::libfuzzer_merge::spawn(Arc::new(config)).await,
Config::GenericAnalysis(config) => analysis::generic::spawn(config).await,
Config::GenericAnalysis(config) => analysis::generic::run(config).await,
Config::GenericGenerator(config) => {
fuzz::generator::GeneratorTask::new(config).run().await
}

View File

@ -49,7 +49,7 @@ use std::{
path::{Path, PathBuf},
sync::Arc,
};
use storage_queue::Message;
use storage_queue::{Message, QueueClient};
use tokio::fs;
const TOTAL_COVERAGE: &str = "total.cov";
@ -59,7 +59,7 @@ pub struct Config {
pub target_exe: PathBuf,
pub target_env: HashMap<String, String>,
pub target_options: Vec<String>,
pub input_queue: Option<Url>,
pub input_queue: Option<QueueClient>,
pub readonly_inputs: Vec<SyncedDir>,
pub coverage: SyncedDir,
@ -93,20 +93,6 @@ impl CoverageTask {
Self { config, poller }
}
pub async fn local_run(&self) -> Result<()> {
let mut processor = CoverageProcessor::new(self.config.clone()).await?;
self.config.coverage.init().await?;
for synced_dir in &self.config.readonly_inputs {
synced_dir.init().await?;
self.record_corpus_coverage(&mut processor, &synced_dir)
.await?;
}
processor.report_total().await?;
Ok(())
}
async fn check_libfuzzer(&self) -> Result<()> {
if self.config.check_fuzzer_help {
let fuzzer = LibFuzzer::new(
@ -160,7 +146,7 @@ impl CoverageTask {
// If a queue has been provided, poll it for new coverage.
if let Some(queue) = &self.config.input_queue {
info!("polling queue for new coverage");
let callback = CallbackImpl::new(queue.clone(), processor);
let callback = CallbackImpl::new(queue.clone(), processor)?;
self.poller.run(callback).await?;
}

View File

@ -64,10 +64,8 @@ impl GeneratorTask {
pub async fn run(&self) -> Result<()> {
self.config.crashes.init().await?;
if let Some(tools) = &self.config.tools {
if tools.url.is_some() {
tools.init_pull().await?;
set_executable(&tools.path).await?;
}
tools.init_pull().await?;
set_executable(&tools.path).await?;
}
let hb_client = self.config.common.init_heartbeat().await?;
@ -207,17 +205,18 @@ mod tests {
async fn test_radamsa_linux() -> anyhow::Result<()> {
use super::{Config, GeneratorTask};
use crate::tasks::config::CommonConfig;
use onefuzz::blob::BlobContainerUrl;
use onefuzz::syncdir::SyncedDir;
use reqwest::Url;
use std::collections::HashMap;
use std::env;
use std::path::Path;
use tempfile::tempdir;
let crashes_temp = tempfile::tempdir()?;
let crashes = crashes_temp.path();
let crashes: &std::path::Path = crashes_temp.path();
let inputs_temp = tempfile::tempdir().unwrap();
let inputs = inputs_temp.path();
let inputs_temp = tempfile::tempdir()?;
let inputs: &std::path::Path = inputs_temp.path();
let input_file = inputs.join("seed.txt");
tokio::fs::write(input_file, "test").await?;
@ -234,23 +233,26 @@ mod tests {
.collect();
let radamsa_path = env::var("ONEFUZZ_TEST_RADAMSA_LINUX")?;
let radamsa_as_path = Path::new(&radamsa_path);
let radamsa_as_path = std::path::Path::new(&radamsa_path);
let radamsa_dir = radamsa_as_path.parent().unwrap();
let readonly_inputs_local = tempfile::tempdir().unwrap().path().into();
let crashes_local = tempfile::tempdir().unwrap().path().into();
let tools_local = tempfile::tempdir().unwrap().path().into();
let config = Config {
generator_exe: String::from("{tools_dir}/radamsa"),
generator_options,
readonly_inputs: vec![SyncedDir {
path: inputs.to_path_buf(),
url: None,
path: readonly_inputs_local,
url: BlobContainerUrl::parse(Url::from_directory_path(inputs).unwrap())?,
}],
crashes: SyncedDir {
path: crashes.to_path_buf(),
url: None,
path: crashes_local,
url: BlobContainerUrl::parse(Url::from_directory_path(crashes).unwrap())?,
},
tools: Some(SyncedDir {
path: radamsa_dir.to_path_buf(),
url: None,
path: tools_local,
url: BlobContainerUrl::parse(Url::from_directory_path(radamsa_dir).unwrap())?,
}),
target_exe: Default::default(),
target_env: Default::default(),

View File

@ -17,9 +17,8 @@ use onefuzz_telemetry::{
};
use serde::Deserialize;
use std::{collections::HashMap, path::PathBuf};
use tempfile::tempdir;
use tempfile::{tempdir_in, TempDir};
use tokio::{
fs::rename,
io::{AsyncBufReadExt, BufReader},
sync::mpsc,
task,
@ -126,6 +125,20 @@ impl LibFuzzerFuzzTask {
Ok(())
}
/// Creates a temporary directory in the current task directory
async fn create_local_temp_dir(&self) -> Result<TempDir> {
let task_dir = self
.config
.inputs
.path
.parent()
.ok_or_else(|| anyhow!("Invalid input path"))?;
let temp_path = task_dir.join(".temp");
tokio::fs::create_dir_all(&temp_path).await?;
let temp_dir = tempdir_in(temp_path)?;
Ok(temp_dir)
}
// The fuzzer monitor coordinates a _series_ of fuzzer runs.
//
// A run is one session of continuous fuzzing, terminated by a fuzzing error
@ -135,7 +148,7 @@ impl LibFuzzerFuzzTask {
worker_id: u64,
stats_sender: Option<&StatsSender>,
) -> Result<()> {
let local_input_dir = tempdir()?;
let local_input_dir = self.create_local_temp_dir().await?;
loop {
self.run_fuzzer(&local_input_dir.path(), worker_id, stats_sender)
.await?;
@ -165,7 +178,7 @@ impl LibFuzzerFuzzTask {
worker_id: u64,
stats_sender: Option<&StatsSender>,
) -> Result<()> {
let crash_dir = tempdir()?;
let crash_dir = self.create_local_temp_dir().await?;
let run_id = Uuid::new_v4();
debug!("starting fuzzer run, run_id = {}", run_id);
@ -235,7 +248,7 @@ impl LibFuzzerFuzzTask {
for file in &files {
if let Some(filename) = file.file_name() {
let dest = self.config.crashes.path.join(filename);
rename(file, dest).await?;
tokio::fs::rename(file, dest).await?;
}
}

View File

@ -31,7 +31,7 @@ use tokio::{
sync::Notify,
};
#[derive(Debug, Deserialize, Default)]
#[derive(Debug, Deserialize)]
pub struct SupervisorConfig {
pub inputs: SyncedDir,
pub crashes: SyncedDir,
@ -199,10 +199,11 @@ async fn start_supervisor(
.set_optional_ref(&config.common.instance_telemetry_key, |tester, key| {
tester.instance_telemetry_key(&key)
})
.set_optional_ref(&config.crashes.url, |tester, url| {
tester
.crashes_account(&url.account())
.crashes_container(&url.container())
.set_optional_ref(&config.crashes.url.account(), |tester, account| {
tester.crashes_account(account)
})
.set_optional_ref(&config.crashes.url.container(), |tester, container| {
tester.crashes_container(container)
});
let supervisor_path = expand.evaluate_value(&config.supervisor_exe)?;
@ -255,6 +256,8 @@ mod tests {
#[cfg(target_os = "linux")]
#[cfg_attr(not(feature = "integration_test"), ignore)]
async fn test_fuzzer_linux() {
use onefuzz::blob::BlobContainerUrl;
use reqwest::Url;
use std::env;
let runtime_dir = tempfile::tempdir().unwrap();
@ -277,15 +280,19 @@ mod tests {
let reports_dir = reports_dir_temp.path().into();
let fault_dir_temp = tempfile::tempdir().unwrap();
let crashes_local = tempfile::tempdir().unwrap().path().into();
let corpus_dir_local = tempfile::tempdir().unwrap().path().into();
let crashes = SyncedDir {
path: fault_dir_temp.path().into(),
url: None,
path: crashes_local,
url: BlobContainerUrl::parse(Url::from_directory_path(fault_dir_temp).unwrap())
.unwrap(),
};
let corpus_dir_temp = tempfile::tempdir().unwrap();
let corpus_dir = SyncedDir {
path: corpus_dir_temp.path().into(),
url: None,
path: corpus_dir_local,
url: BlobContainerUrl::parse(Url::from_directory_path(corpus_dir_temp).unwrap())
.unwrap(),
};
let seed_file_name = corpus_dir.path.join("seed.txt");
tokio::fs::write(seed_file_name, "xyz").await.unwrap();
@ -316,7 +323,17 @@ mod tests {
supervisor_input_marker,
target_exe,
target_options,
..Default::default()
inputs: corpus_dir.clone(),
crashes: crashes.clone(),
tools: None,
wait_for_files: None,
stats_file: None,
stats_format: None,
ensemble_sync_delay: None,
reports: None,
unique_reports: None,
no_repro: None,
common: CommonConfig::default(),
};
let process = start_supervisor(runtime_dir, &config, &crashes, &corpus_dir, reports_dir)

View File

@ -121,9 +121,7 @@ impl<M> InputPoller<M> {
to_process: &SyncedDir,
) -> Result<()> {
self.batch_dir = Some(to_process.clone());
if to_process.url.is_some() {
to_process.init_pull().await?;
}
to_process.init_pull().await?;
info!("batch processing directory: {}", to_process.path.display());
let mut read_dir = fs::read_dir(&to_process.path).await?;

View File

@ -6,26 +6,27 @@ use std::path::{Path, PathBuf};
use anyhow::Result;
use async_trait::async_trait;
use reqwest::Url;
use storage_queue::{Message, QueueClient};
use storage_queue::Message;
use storage_queue::QueueClient;
#[async_trait]
pub trait Queue<M> {
pub trait Queue<M>: Send {
async fn pop(&mut self) -> Result<Option<M>>;
async fn delete(&mut self, msg: M) -> Result<()>;
}
pub trait Parser<M> {
pub trait Parser<M>: Send {
fn parse(&mut self, msg: &M) -> Result<Url>;
}
#[async_trait]
pub trait Downloader {
pub trait Downloader: Send {
async fn download(&mut self, url: Url, dir: &Path) -> Result<PathBuf>;
}
#[async_trait]
pub trait Processor {
pub trait Processor: Send {
async fn process(&mut self, url: Option<Url>, input: &Path) -> Result<()>;
}
@ -72,9 +73,8 @@ impl<P> CallbackImpl<P>
where
P: Processor + Send,
{
pub fn new(queue_url: Url, processor: P) -> Self {
let queue = QueueClient::new(queue_url);
Self { queue, processor }
pub fn new(queue: QueueClient, processor: P) -> Result<Self> {
Ok(Self { queue, processor })
}
}
@ -88,7 +88,7 @@ where
}
async fn delete(&mut self, msg: Message) -> Result<()> {
self.queue.delete(msg).await
msg.delete().await
}
}
@ -97,9 +97,10 @@ where
P: Processor + Send,
{
fn parse(&mut self, msg: &Message) -> Result<Url> {
let text = std::str::from_utf8(msg.data())?;
let url = Url::parse(text)?;
let url = msg.parse(|data| {
let data = std::str::from_utf8(data)?;
Ok(Url::parse(data)?)
})?;
Ok(url)
}
}

View File

@ -68,7 +68,7 @@ pub async fn init_task_heartbeat(
})
.await;
},
);
)?;
Ok(hb)
}

View File

@ -56,9 +56,10 @@ pub async fn spawn(config: Arc<Config>) -> Result<()> {
debug!("tmp dir reset");
utils::reset_tmp_dir(&tmp_dir).await?;
config.unique_inputs.sync_pull().await?;
let mut queue = QueueClient::new(config.input_queue.clone());
let queue = QueueClient::new(config.input_queue.clone())?;
if let Some(msg) = queue.pop().await? {
let input_url = match utils::parse_url_data(msg.data()) {
let input_url = msg.parse(utils::parse_url_data);
let input_url = match input_url {
Ok(url) => url,
Err(err) => {
error!("could not parse input URL from queue message: {}", err);
@ -74,7 +75,7 @@ pub async fn spawn(config: Arc<Config>) -> Result<()> {
} else {
debug!("will delete popped message with id = {}", msg.id());
queue.delete(msg).await?;
msg.delete().await?;
debug!(
"Attempting to delete {} from the candidate container",
@ -88,7 +89,7 @@ pub async fn spawn(config: Arc<Config>) -> Result<()> {
} else {
warn!("no new candidate inputs found, sleeping");
delay_with_jitter(EMPTY_QUEUE_DELAY).await;
}
};
}
}

View File

@ -34,7 +34,7 @@ pub struct Config {
pub target_exe: PathBuf,
pub target_env: HashMap<String, String>,
pub target_options: Vec<String>,
pub input_queue: Option<Url>,
pub input_queue: Option<QueueClient>,
pub inputs: Vec<SyncedDir>,
pub unique_inputs: SyncedDir,
pub preserve_existing_outputs: bool,
@ -58,10 +58,9 @@ pub async fn spawn(config: Arc<Config>) -> Result<()> {
}
config.unique_inputs.init().await?;
if let Some(url) = config.input_queue.clone() {
if let Some(queue) = config.input_queue.clone() {
loop {
let queue = QueueClient::new(url.clone());
if let Err(error) = process_message(config.clone(), queue).await {
if let Err(error) = process_message(config.clone(), queue.clone()).await {
error!(
"failed to process latest message from notification queue: {}",
error
@ -85,7 +84,7 @@ pub async fn spawn(config: Arc<Config>) -> Result<()> {
}
}
async fn process_message(config: Arc<Config>, mut input_queue: QueueClient) -> Result<()> {
async fn process_message(config: Arc<Config>, input_queue: QueueClient) -> Result<()> {
let hb_client = config.common.init_heartbeat().await?;
hb_client.alive();
let tmp_dir = "./tmp";
@ -93,7 +92,11 @@ async fn process_message(config: Arc<Config>, mut input_queue: QueueClient) -> R
utils::reset_tmp_dir(tmp_dir).await?;
if let Some(msg) = input_queue.pop().await? {
let input_url = match utils::parse_url_data(msg.data()) {
let input_url = msg.parse(|data| {
let data = std::str::from_utf8(data)?;
Ok(Url::parse(data)?)
});
let input_url: Url = match input_url {
Ok(url) => url,
Err(err) => {
error!("could not parse input URL from queue message: {}", err);
@ -107,7 +110,7 @@ async fn process_message(config: Arc<Config>, mut input_queue: QueueClient) -> R
debug!("will delete popped message with id = {}", msg.id());
input_queue.delete(msg).await?;
msg.delete().await?;
debug!(
"Attempting to delete {} from the candidate container",

View File

@ -3,22 +3,14 @@
use anyhow::{Context, Result};
use futures::StreamExt;
use onefuzz::{
asan::AsanLog,
blob::{BlobClient, BlobUrl},
fs::exists,
monitor::DirectoryMonitor,
syncdir::SyncedDir,
};
use onefuzz::{asan::AsanLog, blob::BlobUrl, monitor::DirectoryMonitor, syncdir::SyncedDir};
use onefuzz_telemetry::{
Event::{new_report, new_unable_to_reproduce, new_unique_report},
EventData,
};
use reqwest::{StatusCode, Url};
use reqwest_retry::SendRetry;
use serde::{Deserialize, Serialize};
use std::path::{Path, PathBuf};
use tokio::fs;
use uuid::Uuid;
#[derive(Debug, Deserialize, Serialize)]
@ -66,41 +58,12 @@ pub enum CrashTestResult {
NoRepro(NoCrash),
}
// Conditionally upload a report, if it would not be a duplicate.
async fn upload<T: Serialize>(report: &T, url: Url) -> Result<bool> {
let blob = BlobClient::new();
let result = blob
.put(url)
.json(report)
// Conditional PUT, only if-not-exists.
// https://docs.microsoft.com/en-us/rest/api/storageservices/specifying-conditional-headers-for-blob-service-operations
.header("If-None-Match", "*")
.send_retry_default()
.await?;
Ok(result.status() == StatusCode::CREATED)
}
async fn upload_or_save_local<T: Serialize>(
report: &T,
dest_name: &str,
container: &SyncedDir,
) -> Result<bool> {
match &container.url {
Some(blob_url) => {
let url = blob_url.blob(dest_name).url();
upload(report, url).await
}
None => {
let path = container.path.join(dest_name);
if !exists(&path).await? {
let data = serde_json::to_vec(&report)?;
fs::write(path, data).await?;
Ok(true)
} else {
Ok(false)
}
}
}
container.upload(dest_name, report).await
}
impl CrashTestResult {
@ -143,8 +106,8 @@ impl CrashTestResult {
#[derive(Debug, Deserialize, Serialize)]
pub struct InputBlob {
pub account: String,
pub container: String,
pub account: Option<String>,
pub container: Option<String>,
pub name: String,
}

View File

@ -10,17 +10,14 @@ use crate::tasks::{
};
use anyhow::Result;
use async_trait::async_trait;
use futures::stream::StreamExt;
use onefuzz::{
blob::BlobUrl, input_tester::Tester, monitor::DirectoryMonitor, sha256, syncdir::SyncedDir,
};
use onefuzz::{blob::BlobUrl, input_tester::Tester, sha256, syncdir::SyncedDir};
use reqwest::Url;
use serde::Deserialize;
use std::{
collections::HashMap,
path::{Path, PathBuf},
};
use storage_queue::Message;
use storage_queue::{Message, QueueClient};
#[derive(Debug, Deserialize)]
pub struct Config {
@ -32,7 +29,7 @@ pub struct Config {
#[serde(default)]
pub target_env: HashMap<String, String>,
pub input_queue: Option<Url>,
pub input_queue: Option<QueueClient>,
pub crashes: Option<SyncedDir>,
pub reports: Option<SyncedDir>,
pub unique_reports: Option<SyncedDir>,
@ -65,30 +62,6 @@ impl ReportTask {
Self { config, poller }
}
pub async fn local_run(&self) -> Result<()> {
let mut processor = GenericReportProcessor::new(&self.config, None);
info!("Starting generic crash report task");
let crashes = match &self.config.crashes {
Some(x) => x,
None => bail!("missing crashes directory"),
};
let mut read_dir = tokio::fs::read_dir(&crashes.path).await?;
while let Some(crash) = read_dir.next().await {
processor.process(None, &crash?.path()).await?;
}
if self.config.check_queue {
let mut monitor = DirectoryMonitor::new(&crashes.path);
monitor.start()?;
while let Some(crash) = monitor.next().await {
processor.process(None, &crash).await?;
}
}
Ok(())
}
pub async fn managed_run(&mut self) -> Result<()> {
info!("Starting generic crash report task");
let heartbeat_client = self.config.common.init_heartbeat().await?;
@ -102,7 +75,7 @@ impl ReportTask {
info!("processing crashes from queue");
if self.config.check_queue {
if let Some(queue) = &self.config.input_queue {
let callback = CallbackImpl::new(queue.clone(), processor);
let callback = CallbackImpl::new(queue.clone(), processor)?;
self.poller.run(callback).await?;
}
}

View File

@ -7,10 +7,7 @@ use crate::tasks::{
};
use anyhow::{Context, Result};
use async_trait::async_trait;
use futures::stream::StreamExt;
use onefuzz::{
blob::BlobUrl, libfuzzer::LibFuzzer, monitor::DirectoryMonitor, sha256, syncdir::SyncedDir,
};
use onefuzz::{blob::BlobUrl, libfuzzer::LibFuzzer, sha256, syncdir::SyncedDir};
use reqwest::Url;
use serde::Deserialize;
use std::{
@ -18,7 +15,7 @@ use std::{
path::{Path, PathBuf},
sync::Arc,
};
use storage_queue::Message;
use storage_queue::{Message, QueueClient};
#[derive(Debug, Deserialize)]
pub struct Config {
@ -27,7 +24,7 @@ pub struct Config {
// TODO: options are not yet used for crash reporting
pub target_options: Vec<String>,
pub target_timeout: Option<u64>,
pub input_queue: Option<Url>,
pub input_queue: Option<QueueClient>,
pub crashes: Option<SyncedDir>,
pub reports: Option<SyncedDir>,
pub unique_reports: Option<SyncedDir>,
@ -59,13 +56,8 @@ impl ReportTask {
Self { config, poller }
}
pub async fn local_run(&self) -> Result<()> {
let mut processor = AsanProcessor::new(self.config.clone()).await?;
let crashes = match &self.config.crashes {
Some(x) => x,
None => bail!("missing crashes directory"),
};
crashes.init().await?;
pub async fn managed_run(&mut self) -> Result<()> {
info!("Starting libFuzzer crash report task");
if let Some(unique_reports) = &self.config.unique_reports {
unique_reports.init().await?;
@ -77,30 +69,6 @@ impl ReportTask {
no_repro.init().await?;
}
let mut read_dir = tokio::fs::read_dir(&crashes.path).await.with_context(|| {
format_err!(
"unable to read crashes directory {}",
crashes.path.display()
)
})?;
while let Some(crash) = read_dir.next().await {
processor.process(None, &crash?.path()).await?;
}
if self.config.check_queue {
let mut monitor = DirectoryMonitor::new(crashes.path.clone());
monitor.start()?;
while let Some(crash) = monitor.next().await {
processor.process(None, &crash).await?;
}
}
Ok(())
}
pub async fn managed_run(&mut self) -> Result<()> {
info!("Starting libFuzzer crash report task");
let mut processor = AsanProcessor::new(self.config.clone()).await?;
if let Some(crashes) = &self.config.crashes {
@ -108,8 +76,8 @@ impl ReportTask {
}
if self.config.check_queue {
if let Some(queue) = &self.config.input_queue {
let callback = CallbackImpl::new(queue.clone(), processor);
if let Some(url) = &self.config.input_queue {
let callback = CallbackImpl::new(url.clone(), processor)?;
self.poller.run(callback).await?;
}
}

View File

@ -14,26 +14,34 @@ pub async fn download_input(input_url: Url, dst: impl AsRef<Path>) -> Result<Pat
let file_name = input_url.path_segments().unwrap().last().unwrap();
let file_path = dst.as_ref().join(file_name);
let resp = Client::new()
.get(input_url)
.send_retry_default()
.await?
.error_for_status_with_body()
.await?;
if input_url.scheme().to_lowercase() == "file" {
let input_file_path = input_url
.to_file_path()
.map_err(|_| anyhow!("Invalid file Url"))?;
fs::copy(&input_file_path, &file_path).await?;
Ok(file_path)
} else {
let resp = Client::new()
.get(input_url)
.send_retry_default()
.await?
.error_for_status_with_body()
.await?;
let body = resp.bytes().await?;
let mut body = body.as_ref();
let body = resp.bytes().await?;
let mut body = body.as_ref();
let file = fs::OpenOptions::new()
.create(true)
.write(true)
.open(&file_path)
.await?;
let mut writer = io::BufWriter::new(file);
let file = fs::OpenOptions::new()
.create(true)
.write(true)
.open(&file_path)
.await?;
let mut writer = io::BufWriter::new(file);
io::copy(&mut body, &mut writer).await?;
io::copy(&mut body, &mut writer).await?;
Ok(file_path)
Ok(file_path)
}
}
pub async fn reset_tmp_dir(tmp_dir: impl AsRef<Path>) -> Result<()> {