Mirror of https://github.com/microsoft/onefuzz.git
refactor SyncDir and blob container url (#809)
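This change renames the two fields of `SyncedDir` throughout the agent: `path` becomes `local_path` and `url` becomes `remote_path`. A minimal sketch of the struct after the rename, with types inferred from usage in the hunks below (the actual definition is not part of this diff):

    // Inferred sketch, not the verbatim definition: the hunks below join
    // `local_path` like a PathBuf and query `remote_path` as an
    // Option<BlobContainerUrl> for account()/container().
    pub struct SyncedDir {
        pub local_path: std::path::PathBuf,
        pub remote_path: Option<BlobContainerUrl>,
    }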
@@ -172,11 +172,14 @@ pub fn get_synced_dirs(
             let remote_blob_url = BlobContainerUrl::new(remote_url).expect("invalid url");
             let path = current_dir.join(format!("{}/{}/{}_{}", job_id, task_id, name, index));
             Ok(SyncedDir {
-                url: Some(remote_blob_url),
-                path,
+                remote_path: Some(remote_blob_url),
+                local_path: path,
             })
         } else {
-            Ok(SyncedDir { url: None, path })
+            Ok(SyncedDir {
+                remote_path: None,
+                local_path: path,
+            })
         }
     })
     .collect()
@@ -195,13 +198,13 @@ pub fn get_synced_dir(
         let remote_blob_url = BlobContainerUrl::new(remote_url)?;
         let path = std::env::current_dir()?.join(format!("{}/{}/{}", job_id, task_id, name));
         Ok(SyncedDir {
-            url: Some(remote_blob_url),
-            path,
+            remote_path: Some(remote_blob_url),
+            local_path: path,
         })
     } else {
         Ok(SyncedDir {
-            url: None,
-            path: remote_path,
+            remote_path: None,
+            local_path: remote_path,
        })
    }
}
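Note the asymmetry in the `else` branches above: when no remote URL is involved, the directory is purely local, so `remote_path` is `None`; in `get_synced_dir`, the caller-supplied `remote_path` argument is evidently already a local directory and becomes `local_path` directly. Condensed:

    // Remote-backed: pair a blob container URL with a task-local working dir.
    Ok(SyncedDir { remote_path: Some(remote_blob_url), local_path: path })
    // Local-only: the supplied path is used in place; nothing to sync.
    Ok(SyncedDir { remote_path: None, local_path: remote_path })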
@@ -49,7 +49,7 @@ pub struct Config {
 pub async fn run(config: Config) -> Result<()> {
     let task_dir = config
         .analysis
-        .path
+        .local_path
         .parent()
         .ok_or_else(|| anyhow!("Invalid input path"))?;
     let temp_path = task_dir.join(".temp");
@@ -94,7 +94,7 @@ pub async fn run(config: Config) -> Result<()> {
         (None, None)
     };

-    set_executable(&config.tools.path).await?;
+    set_executable(&config.tools.local_path).await?;
     run_existing(&config, &reports_path).await?;
     let poller = poll_inputs(&config, tmp, &reports_path);

@@ -114,7 +114,7 @@ async fn run_existing(config: &Config, reports_dir: &Option<PathBuf>) -> Result<
     if let Some(crashes) = &config.crashes {
         crashes.init_pull().await?;
         let mut count: u64 = 0;
-        let mut read_dir = fs::read_dir(&crashes.path).await?;
+        let mut read_dir = fs::read_dir(&crashes.local_path).await?;
         while let Some(file) = read_dir.next_entry().await? {
             debug!("Processing file {:?}", file);
             run_tool(file.path(), &config, &reports_dir).await?;
@@ -128,9 +128,9 @@ async fn run_existing(config: &Config, reports_dir: &Option<PathBuf>) -> Result<

 async fn already_checked(config: &Config, input: &BlobUrl) -> Result<bool> {
     let result = if let Some(crashes) = &config.crashes {
-        crashes.url.clone().and_then(|u| u.account()) == input.account()
-            && crashes.url.clone().and_then(|u| u.container()) == input.container()
-            && crashes.path.join(input.name()).exists()
+        crashes.remote_path.clone().and_then(|u| u.account()) == input.account()
+            && crashes.remote_path.clone().and_then(|u| u.container()) == input.container()
+            && crashes.local_path.join(input.name()).exists()
     } else {
         false
     };
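`already_checked` treats an input as seen only when the crashes container matches the input blob's storage account and container and the file already exists on disk. After the rename, the remote comparison reads off `remote_path` and the filesystem check off `local_path`; a condensed sketch (assuming `account()` and `container()` return `Option<String>`, as the `and_then` calls imply):

    // Condensed from the hunk above; `crashes: &SyncedDir`, `input: &BlobUrl`.
    let same_remote = crashes.remote_path.clone().and_then(|u| u.account()) == input.account()
        && crashes.remote_path.clone().and_then(|u| u.container()) == input.container();
    let seen_locally = crashes.local_path.join(input.name()).exists();
    Ok(same_remote && seen_locally)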
@@ -193,8 +193,8 @@ pub async fn run_tool(
         .target_options(&config.target_options)
         .analyzer_exe(&config.analyzer_exe)
         .analyzer_options(&config.analyzer_options)
-        .output_dir(&config.analysis.path)
-        .tools_dir(&config.tools.path)
+        .output_dir(&config.analysis.local_path)
+        .tools_dir(&config.tools.local_path)
         .setup_dir(&config.common.setup_dir)
         .job_id(&config.common.job_id)
         .task_id(&config.common.task_id)
@@ -210,11 +210,11 @@ pub async fn run_tool(
         .set_optional_ref(&config.crashes, |tester, crashes| {
             tester
                 .set_optional_ref(
-                    &crashes.url.clone().and_then(|u| u.account()),
+                    &crashes.remote_path.clone().and_then(|u| u.account()),
                     |tester, account| tester.crashes_account(account),
                 )
                 .set_optional_ref(
-                    &crashes.url.clone().and_then(|u| u.container()),
+                    &crashes.remote_path.clone().and_then(|u| u.container()),
                     |tester, container| tester.crashes_container(container),
                 )
         });
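The `set_optional_ref` combinator used above threads an `Option` through a builder: if the value is `Some`, the closure applies it; otherwise the builder passes through unchanged. A hedged sketch of its shape, inferred from the call sites in this diff rather than from its definition:

    // Assumed shape, sketched on the Expand builder as the call sites suggest.
    impl Expand {
        fn set_optional_ref<T>(
            self,
            value: &Option<T>,
            setter: impl FnOnce(Self, &T) -> Self,
        ) -> Self {
            match value {
                Some(value) => setter(self, value),
                None => self,
            }
        }
    }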
@@ -116,7 +116,7 @@ impl CoverageTask {
         let mut seen_inputs = false;
         // Update the total with the coverage from each seed corpus.
         for dir in &self.config.readonly_inputs {
-            debug!("recording coverage for {}", dir.path.display());
+            debug!("recording coverage for {}", dir.local_path.display());
             dir.init_pull().await?;
             if self.record_corpus_coverage(&mut processor, dir).await? {
                 seen_inputs = true;
@@ -150,12 +150,14 @@ impl CoverageTask {
         processor: &mut CoverageProcessor,
         corpus_dir: &SyncedDir,
     ) -> Result<bool> {
-        let mut corpus = fs::read_dir(&corpus_dir.path).await.with_context(|| {
-            format!(
-                "unable to read corpus coverage directory: {}",
-                corpus_dir.path.display()
-            )
-        })?;
+        let mut corpus = fs::read_dir(&corpus_dir.local_path)
+            .await
+            .with_context(|| {
+                format!(
+                    "unable to read corpus coverage directory: {}",
+                    corpus_dir.local_path.display()
+                )
+            })?;
         let mut seen_inputs = false;

         loop {
@@ -187,7 +189,7 @@ pub struct CoverageProcessor {
 impl CoverageProcessor {
     pub async fn new(config: Arc<Config>) -> Result<Self> {
         let heartbeat_client = config.common.init_heartbeat().await?;
-        let total = TotalCoverage::new(config.coverage.path.join(TOTAL_COVERAGE));
+        let total = TotalCoverage::new(config.coverage.local_path.join(TOTAL_COVERAGE));
         let recorder = CoverageRecorder::new(config.clone()).await?;
         let module_totals = BTreeMap::default();

@@ -209,7 +211,7 @@ impl CoverageProcessor {
         debug!("updating module info {:?}", module);

         if !self.module_totals.contains_key(&module) {
-            let parent = &self.config.coverage.path.join("by-module");
+            let parent = &self.config.coverage.local_path.join("by-module");
             fs::create_dir_all(parent).await.with_context(|| {
                 format!(
                     "unable to create by-module coverage directory: {}",
@@ -98,7 +98,7 @@ impl CoverageRecorder {

         let coverage_path = {
             let digest = digest_file(test_input).await?;
-            self.config.coverage.path.join("inputs").join(digest)
+            self.config.coverage.local_path.join("inputs").join(digest)
         };

         fs::create_dir_all(&coverage_path).await.with_context(|| {
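Per-input coverage is stored content-addressed: the digest of the input file names a subdirectory under the coverage root, which after this change hangs off `local_path`:

    // From the hunk above: coverage for each test input lands in
    // <coverage.local_path>/inputs/<digest-of-input>/.
    let digest = digest_file(test_input).await?;
    let coverage_path = self.config.coverage.local_path.join("inputs").join(digest);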
@@ -64,7 +64,7 @@ impl GeneratorTask {
         self.config.crashes.init().await?;
         if let Some(tools) = &self.config.tools {
             tools.init_pull().await?;
-            set_executable(&tools.path).await?;
+            set_executable(&tools.local_path).await?;
         }

         let hb_client = self.config.common.init_heartbeat().await?;
@@ -104,7 +104,7 @@ impl GeneratorTask {
         loop {
             for corpus_dir in &self.config.readonly_inputs {
                 heartbeat_client.alive();
-                let corpus_dir = &corpus_dir.path;
+                let corpus_dir = &corpus_dir.local_path;
                 let generated_inputs = tempdir()?;
                 let generated_inputs_path = generated_inputs.path();

@@ -131,7 +131,7 @@ impl GeneratorTask {
                 file.file_name()
             };

-            let destination_file = self.config.crashes.path.join(destination_file);
+            let destination_file = self.config.crashes.local_path.join(destination_file);
             if tester.is_crash(file.path()).await? {
                 fs::rename(file.path(), &destination_file).await?;
                 debug!("crash found {}", destination_file.display());
@@ -162,7 +162,7 @@ impl GeneratorTask {
                 tester.instance_telemetry_key(&key)
             })
             .set_optional_ref(&self.config.tools, |expand, tools| {
-                expand.tools_dir(&tools.path)
+                expand.tools_dir(&tools.local_path)
             });

         let generator_path = expand.evaluate_value(&self.config.generator_exe)?;
@@ -240,20 +240,20 @@ mod tests {
             generator_exe: String::from("{tools_dir}/radamsa"),
             generator_options,
             readonly_inputs: vec![SyncedDir {
-                path: readonly_inputs_local,
-                url: Some(BlobContainerUrl::parse(
+                local_path: readonly_inputs_local,
+                remote_path: Some(BlobContainerUrl::parse(
                     Url::from_directory_path(inputs).unwrap(),
                 )?),
             }],
             crashes: SyncedDir {
-                path: crashes_local,
-                url: Some(BlobContainerUrl::parse(
+                local_path: crashes_local,
+                remote_path: Some(BlobContainerUrl::parse(
                     Url::from_directory_path(crashes).unwrap(),
                 )?),
             },
             tools: Some(SyncedDir {
-                path: tools_local,
-                url: Some(BlobContainerUrl::parse(
+                local_path: tools_local,
+                remote_path: Some(BlobContainerUrl::parse(
                     Url::from_directory_path(radamsa_dir).unwrap(),
                 )?),
             }),
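The generator tests follow the same rename. `Url::from_directory_path` lets a local temp directory stand in for a blob container, so a test `SyncedDir` can be built without touching Azure. A hedged, standalone sketch of that pattern (the paths are placeholders, not from the source):

    // Url is from the `url` crate; BlobContainerUrl is onefuzz's wrapper.
    // Assumes BlobContainerUrl::parse accepts a file:// URL, as the test
    // usage above suggests.
    let dir = SyncedDir {
        local_path: std::path::PathBuf::from("/tmp/work"),
        remote_path: Some(BlobContainerUrl::parse(
            Url::from_directory_path("/tmp/corpus").expect("absolute dir path"),
        )?),
    };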
@@ -104,9 +104,12 @@ impl LibFuzzerFuzzTask {
     }

     pub async fn verify(&self) -> Result<()> {
-        let mut directories = vec![self.config.inputs.path.clone()];
+        let mut directories = vec![self.config.inputs.local_path.clone()];
         if let Some(readonly_inputs) = &self.config.readonly_inputs {
-            let mut dirs = readonly_inputs.iter().map(|x| x.path.clone()).collect();
+            let mut dirs = readonly_inputs
+                .iter()
+                .map(|x| x.local_path.clone())
+                .collect();
             directories.append(&mut dirs);
         }

@@ -136,7 +139,7 @@ impl LibFuzzerFuzzTask {
         let task_dir = self
             .config
             .inputs
-            .path
+            .local_path
             .parent()
             .ok_or_else(|| anyhow!("Invalid input path"))?;
         let temp_path = task_dir.join(".temp");
@@ -161,7 +164,12 @@ impl LibFuzzerFuzzTask {

         let mut entries = tokio::fs::read_dir(local_input_dir.path()).await?;
         while let Ok(Some(entry)) = entries.next_entry().await {
-            let destination_path = self.config.inputs.path.clone().join(entry.file_name());
+            let destination_path = self
+                .config
+                .inputs
+                .local_path
+                .clone()
+                .join(entry.file_name());
             tokio::fs::rename(&entry.path(), &destination_path)
                 .await
                 .with_context(|| {
@@ -189,9 +197,11 @@ impl LibFuzzerFuzzTask {

         debug!("starting fuzzer run, run_id = {}", run_id);

-        let mut inputs = vec![&self.config.inputs.path];
+        let mut inputs = vec![&self.config.inputs.local_path];
         if let Some(readonly_inputs) = &self.config.readonly_inputs {
-            readonly_inputs.iter().for_each(|d| inputs.push(&d.path));
+            readonly_inputs
+                .iter()
+                .for_each(|d| inputs.push(&d.local_path));
         }

         let fuzzer = LibFuzzer::new(
@@ -262,7 +272,7 @@ impl LibFuzzerFuzzTask {

         for file in &files {
             if let Some(filename) = file.file_name() {
-                let dest = self.config.crashes.path.join(filename);
+                let dest = self.config.crashes.local_path.join(filename);
                 if let Err(e) = tokio::fs::rename(file.clone(), dest.clone()).await {
                     if !dest.exists() {
                         bail!(e)
@@ -62,13 +62,13 @@ pub async fn spawn(config: SupervisorConfig) -> Result<(), Error> {
     // setup tools
     if let Some(tools) = &config.tools {
         tools.init_pull().await?;
-        set_executable(&tools.path).await?;
+        set_executable(&tools.local_path).await?;
     }

     // setup crashes
     let crashes = SyncedDir {
-        path: runtime_dir.path().join("crashes"),
-        url: config.crashes.url.clone(),
+        local_path: runtime_dir.path().join("crashes"),
+        remote_path: config.crashes.remote_path.clone(),
     };
     crashes.init().await?;
     let monitor_crashes = crashes.monitor_results(new_result, false);
@@ -92,8 +92,8 @@ pub async fn spawn(config: SupervisorConfig) -> Result<(), Error> {
     );

     let inputs = SyncedDir {
-        path: runtime_dir.path().join("inputs"),
-        url: config.inputs.url.clone(),
+        local_path: runtime_dir.path().join("inputs"),
+        remote_path: config.inputs.remote_path.clone(),
     };

     inputs.init().await?;
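In both hunks above the supervisor keeps the remote half from the task config but roots the local half under its own runtime directory, so each run gets fresh working directories that still sync against the long-lived containers:

    // Pattern from the hunks above: local half under runtime_dir, remote
    // half inherited from the task config.
    let crashes = SyncedDir {
        local_path: runtime_dir.path().join("crashes"),
        remote_path: config.crashes.remote_path.clone(),
    };
    crashes.init().await?;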
@@ -105,7 +105,7 @@ pub async fn spawn(config: SupervisorConfig) -> Result<(), Error> {
         let delay = std::time::Duration::from_secs(10);
         loop {
             dir.sync_pull().await?;
-            if has_files(&dir.path).await? {
+            if has_files(&dir.local_path).await? {
                 break;
             }
             delay_with_jitter(delay).await;
@@ -177,13 +177,15 @@ async fn start_supervisor(
         .supervisor_exe(&config.supervisor_exe)
         .supervisor_options(&config.supervisor_options)
         .runtime_dir(&runtime_dir)
-        .crashes(&crashes.path)
-        .input_corpus(&inputs.path)
+        .crashes(&crashes.local_path)
+        .input_corpus(&inputs.local_path)
         .reports_dir(&reports_dir)
         .setup_dir(&config.common.setup_dir)
         .job_id(&config.common.job_id)
         .task_id(&config.common.task_id)
-        .set_optional_ref(&config.tools, |expand, tools| expand.tools_dir(&tools.path))
+        .set_optional_ref(&config.tools, |expand, tools| {
+            expand.tools_dir(&tools.local_path)
+        })
         .set_optional_ref(&config.target_exe, |expand, target_exe| {
             expand.target_exe(target_exe)
         })
@@ -200,11 +202,15 @@ async fn start_supervisor(
             tester.instance_telemetry_key(&key)
         })
         .set_optional_ref(
-            &config.crashes.url.clone().and_then(|u| u.account()),
+            &config.crashes.remote_path.clone().and_then(|u| u.account()),
             |tester, account| tester.crashes_account(account),
         )
         .set_optional_ref(
-            &config.crashes.url.clone().and_then(|u| u.container()),
+            &config
+                .crashes
+                .remote_path
+                .clone()
+                .and_then(|u| u.container()),
             |tester, container| tester.crashes_container(container),
         );

@@ -286,21 +292,21 @@ mod tests {
         let crashes_local = tempfile::tempdir().unwrap().path().into();
         let corpus_dir_local = tempfile::tempdir().unwrap().path().into();
         let crashes = SyncedDir {
-            path: crashes_local,
-            url: Some(
+            local_path: crashes_local,
+            remote_path: Some(
                 BlobContainerUrl::parse(Url::from_directory_path(fault_dir_temp).unwrap()).unwrap(),
             ),
         };

         let corpus_dir_temp = tempfile::tempdir().unwrap();
         let corpus_dir = SyncedDir {
-            path: corpus_dir_local,
-            url: Some(
+            local_path: corpus_dir_local,
+            remote_path: Some(
                 BlobContainerUrl::parse(Url::from_directory_path(corpus_dir_temp).unwrap())
                     .unwrap(),
             ),
         };
-        let seed_file_name = corpus_dir.path.join("seed.txt");
+        let seed_file_name = corpus_dir.local_path.join("seed.txt");
         tokio::fs::write(seed_file_name, "xyz").await.unwrap();

         let target_options = Some(vec!["{input}".to_owned()]);
@@ -349,7 +355,7 @@ mod tests {
         let notify = Notify::new();
         let _fuzzing_monitor =
             monitor_process(process, "supervisor".to_string(), false, Some(&notify));
-        let stat_output = crashes.path.join("fuzzer_stats");
+        let stat_output = crashes.local_path.join("fuzzer_stats");
         let start = Instant::now();
         loop {
             if has_stats(&stat_output).await {
@@ -128,10 +128,10 @@ impl<M> InputPoller<M> {
         info!(
             "batch processing directory: {} - {}",
             self.name,
-            to_process.path.display()
+            to_process.local_path.display()
         );

-        let mut read_dir = fs::read_dir(&to_process.path).await?;
+        let mut read_dir = fs::read_dir(&to_process.local_path).await?;
         while let Some(file) = read_dir.next_entry().await? {
             let path = file.path();
             info!(
@@ -143,7 +143,7 @@ impl<M> InputPoller<M> {
         // Compute the file name relative to the synced directory, and thus the
         // container.
         let blob_name = {
-            let dir_path = to_process.path.canonicalize()?;
+            let dir_path = to_process.local_path.canonicalize()?;
             let input_path = path.canonicalize()?;
             let dir_relative = input_path.strip_prefix(&dir_path)?;
             dir_relative.display().to_string()
@@ -161,7 +161,7 @@ impl<M> InputPoller<M> {
         if let Ok(blob) = BlobUrl::new(url.clone()) {
             batch_dir.try_url().and_then(|u| u.account()) == blob.account()
                 && batch_dir.try_url().and_then(|u| u.container()) == blob.container()
-                && batch_dir.path.join(blob.name()).exists()
+                && batch_dir.local_path.join(blob.name()).exists()
         } else {
             false
         }
@@ -46,7 +46,7 @@ pub struct Config {

 pub async fn spawn(config: Arc<Config>) -> Result<()> {
     config.tools.init_pull().await?;
-    set_executable(&config.tools.path).await?;
+    set_executable(&config.tools.local_path).await?;

     config.unique_inputs.init().await?;
     let hb_client = config.common.init_heartbeat().await?;
@@ -94,7 +94,8 @@ pub async fn spawn(config: Arc<Config>) -> Result<()> {
 }

 async fn process_message(config: Arc<Config>, input_url: &Url, tmp_dir: &Path) -> Result<()> {
-    let input_path = utils::download_input(input_url.clone(), &config.unique_inputs.path).await?;
+    let input_path =
+        utils::download_input(input_url.clone(), &config.unique_inputs.local_path).await?;
     info!("downloaded input to {}", input_path.display());

     info!("Merging corpus");
@@ -105,8 +106,8 @@ async fn process_message(config: Arc<Config>, input_url: &Url, tmp_dir: &Path) -
         queue_dir.push("queue");
         let _delete_output = tokio::fs::remove_dir_all(queue_dir).await;
         let synced_dir = SyncedDir {
-            path: tmp_dir.to_path_buf(),
-            url: config.unique_inputs.url.clone(),
+            local_path: tmp_dir.to_path_buf(),
+            remote_path: config.unique_inputs.remote_path.clone(),
         };
         synced_dir.sync_push().await?
     }
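The merge task uses the same pairing in reverse: it wraps a temporary local directory in a throwaway `SyncedDir` that shares the `unique_inputs` container, then pushes the merged corpus with `sync_push`:

    // From the hunk above: temp-local contents pushed to the remote container.
    let synced_dir = SyncedDir {
        local_path: tmp_dir.to_path_buf(),
        remote_path: config.unique_inputs.remote_path.clone(),
    };
    synced_dir.sync_push().await?;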
@@ -131,14 +132,14 @@ async fn try_delete_blob(input_url: Url) -> Result<()> {
 async fn merge(config: &Config, output_dir: impl AsRef<Path>) -> Result<()> {
     let expand = Expand::new()
         .input_marker(&config.supervisor_input_marker)
-        .input_corpus(&config.unique_inputs.path)
+        .input_corpus(&config.unique_inputs.local_path)
         .target_options(&config.target_options)
         .supervisor_exe(&config.supervisor_exe)
         .supervisor_options(&config.supervisor_options)
         .generated_inputs(output_dir)
         .target_exe(&config.target_exe)
         .setup_dir(&config.common.setup_dir)
-        .tools_dir(&config.tools.path)
+        .tools_dir(&config.tools.local_path)
         .job_id(&config.common.job_id)
         .task_id(&config.common.task_id)
         .set_optional_ref(&config.common.microsoft_telemetry_key, |tester, key| {
@@ -70,7 +70,7 @@ pub async fn spawn(config: Arc<Config>) -> Result<()> {
         input.init().await?;
         input.sync_pull().await?;
     }
-    let input_paths = config.inputs.iter().map(|i| &i.path).collect();
+    let input_paths = config.inputs.iter().map(|i| &i.local_path).collect();
     sync_and_merge(
         config.clone(),
         input_paths,
@@ -166,7 +166,9 @@ pub async fn merge_inputs(
         &config.target_env,
         &config.common.setup_dir,
     );
-    merger.merge(&config.unique_inputs.path, &candidates).await
+    merger
+        .merge(&config.unique_inputs.local_path, &candidates)
+        .await
 }

 async fn try_delete_blob(input_url: Url) -> Result<()> {
@@ -73,7 +73,7 @@ pub async fn handle_inputs(
     heartbeat_client: &Option<TaskHeartbeatClient>,
 ) -> Result<()> {
     readonly_inputs.init_pull().await?;
-    let mut input_files = tokio::fs::read_dir(&readonly_inputs.path).await?;
+    let mut input_files = tokio::fs::read_dir(&readonly_inputs.local_path).await?;
     while let Some(file) = input_files.next_entry().await? {
         heartbeat_client.alive();

@@ -88,7 +88,7 @@ pub async fn handle_inputs(
             .to_string_lossy()
             .to_string();

-        let input_url = readonly_inputs.remote_url()?.url().join(&file_name)?;
+        let input_url = readonly_inputs.remote_url()?.url()?.join(&file_name)?;

         let crash_test_result = handler.get_crash_result(file_path, input_url).await?;
         RegressionReport {
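Beyond the field rename, this hunk and the last hunk below show `remote_url()?.url()` gaining its own `?`, and the `.clone()` after it disappearing: `url()` is evidently fallible after the refactor, presumably returning an owned `Result<Url>` so that a container URL with no valid URL form surfaces an error instead of panicking. This is an inference from the call sites, not from the definition:

    // Call-site pattern after the change: the URL conversion and the join
    // can each fail, so each gets its own `?`.
    let input_url = readonly_inputs.remote_url()?.url()?.join(&file_name)?;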
@@ -120,7 +120,7 @@ pub async fn handle_crash_reports(
     for possible_dir in report_dirs {
         possible_dir.init_pull().await?;

-        let mut report_files = tokio::fs::read_dir(&possible_dir.path).await?;
+        let mut report_files = tokio::fs::read_dir(&possible_dir.local_path).await?;
         while let Some(file) = report_files.next_entry().await? {
             heartbeat_client.alive();
             let file_path = file.path();
@@ -150,8 +150,8 @@ pub async fn handle_crash_reports(
         }
         .ok_or_else(|| format_err!("crash report is missing input blob: {}", file_name))?;

-        let input_url = crashes.remote_url()?.url().clone();
-        let input = crashes.path.join(&input_blob.name);
+        let input_url = crashes.remote_url()?.url()?;
+        let input = crashes.local_path.join(&input_blob.name);
         let crash_test_result = handler.get_crash_result(input, input_url).await?;

         RegressionReport {