move verbose to debug to align with log and opentelemetry (#541)

This commit is contained in:
bmc-msft
2021-02-11 16:49:27 -05:00
committed by GitHub
parent a3d73a240d
commit 360693e8a4
23 changed files with 68 additions and 72 deletions

View File

@ -58,7 +58,7 @@ async fn run_existing(config: &Config) -> Result<()> {
let mut count = 0; let mut count = 0;
let mut read_dir = fs::read_dir(&crashes.path).await?; let mut read_dir = fs::read_dir(&crashes.path).await?;
while let Some(file) = read_dir.next().await { while let Some(file) = read_dir.next().await {
verbose!("Processing file {:?}", file); debug!("Processing file {:?}", file);
let file = file?; let file = file?;
run_tool(file.path(), &config).await?; run_tool(file.path(), &config).await?;
count += 1; count += 1;

View File

@ -134,7 +134,7 @@ impl CoverageTask {
let mut seen_inputs = false; let mut seen_inputs = false;
// Update the total with the coverage from each seed corpus. // Update the total with the coverage from each seed corpus.
for dir in &self.config.readonly_inputs { for dir in &self.config.readonly_inputs {
verbose!("recording coverage for {}", dir.path.display()); debug!("recording coverage for {}", dir.path.display());
dir.init_pull().await?; dir.init_pull().await?;
if self.record_corpus_coverage(&mut processor, dir).await? { if self.record_corpus_coverage(&mut processor, dir).await? {
seen_inputs = true; seen_inputs = true;
@ -227,7 +227,7 @@ impl CoverageProcessor {
.ok_or_else(|| format_err!("module must have filename"))? .ok_or_else(|| format_err!("module must have filename"))?
.to_os_string(); .to_os_string();
verbose!("updating module info {:?}", module); debug!("updating module info {:?}", module);
if !self.module_totals.contains_key(&module) { if !self.module_totals.contains_key(&module) {
let parent = &self.config.coverage.path.join("by-module"); let parent = &self.config.coverage.path.join("by-module");
@ -244,7 +244,7 @@ impl CoverageProcessor {
self.module_totals[&module].update_bytes(data).await?; self.module_totals[&module].update_bytes(data).await?;
verbose!("updated {:?}", module); debug!("updated {:?}", module);
Ok(()) Ok(())
} }
@ -254,7 +254,7 @@ impl CoverageProcessor {
let mut sum = Vec::new(); let mut sum = Vec::new();
for file in &files { for file in &files {
verbose!("checking {:?}", file); debug!("checking {:?}", file);
let mut content = fs::read(file) let mut content = fs::read(file)
.await .await
.with_context(|| format!("unable to read module coverage: {}", file.display()))?; .with_context(|| format!("unable to read module coverage: {}", file.display()))?;

View File

@ -71,11 +71,11 @@ impl CoverageRecorder {
return Err(err); return Err(err);
} else { } else {
verbose!( debug!(
"recording stderr: {}", "recording stderr: {}",
String::from_utf8_lossy(&output.stderr) String::from_utf8_lossy(&output.stderr)
); );
verbose!( debug!(
"recording stdout: {}", "recording stdout: {}",
String::from_utf8_lossy(&output.stdout) String::from_utf8_lossy(&output.stdout)
); );

View File

@ -127,7 +127,7 @@ impl GeneratorTask {
while let Some(file) = read_dir.next().await { while let Some(file) = read_dir.next().await {
let file = file?; let file = file?;
verbose!("testing input: {:?}", file); debug!("testing input: {:?}", file);
let destination_file = if self.config.rename_output { let destination_file = if self.config.rename_output {
let hash = sha256::digest_file(file.path()).await?; let hash = sha256::digest_file(file.path()).await?;
@ -139,7 +139,7 @@ impl GeneratorTask {
let destination_file = self.config.crashes.path.join(destination_file); let destination_file = self.config.crashes.path.join(destination_file);
if tester.is_crash(file.path()).await? { if tester.is_crash(file.path()).await? {
fs::rename(file.path(), &destination_file).await?; fs::rename(file.path(), &destination_file).await?;
verbose!("crash found {}", destination_file.display()); debug!("crash found {}", destination_file.display());
} }
} }
Ok(()) Ok(())

View File

@ -168,7 +168,7 @@ impl LibFuzzerFuzzTask {
let crash_dir = tempdir()?; let crash_dir = tempdir()?;
let run_id = Uuid::new_v4(); let run_id = Uuid::new_v4();
verbose!("starting fuzzer run, run_id = {}", run_id); debug!("starting fuzzer run, run_id = {}", run_id);
let mut inputs = vec![&self.config.inputs.path]; let mut inputs = vec![&self.config.inputs.path];
if let Some(readonly_inputs) = &self.config.readonly_inputs { if let Some(readonly_inputs) = &self.config.readonly_inputs {

View File

@ -137,7 +137,7 @@ pub async fn spawn(config: SupervisorConfig) -> Result<(), Error> {
.evaluate_value(stats_file)?, .evaluate_value(stats_file)?,
) )
} else { } else {
verbose!("no stats file to monitor"); debug!("no stats file to monitor");
None None
}; };

View File

@ -161,7 +161,7 @@ impl<M> InputPoller<M> {
loop { loop {
match self.state() { match self.state() {
State::Polled(None) => { State::Polled(None) => {
verbose!("Input queue empty, sleeping"); debug!("Input queue empty, sleeping");
delay_with_jitter(POLL_INTERVAL).await; delay_with_jitter(POLL_INTERVAL).await;
} }
State::Downloaded(_msg, _url, input, _tempdir) => { State::Downloaded(_msg, _url, input, _tempdir) => {
@ -241,7 +241,7 @@ impl<M> InputPoller<M> {
(Parsed(msg, url), Download(downloader)) => { (Parsed(msg, url), Download(downloader)) => {
let download_dir = tempdir()?; let download_dir = tempdir()?;
if self.seen_in_batch(&url).await? { if self.seen_in_batch(&url).await? {
verbose!("url was seen during batch processing: {:?}", url); debug!("url was seen during batch processing: {:?}", url);
self.set_state(Processed(msg)); self.set_state(Processed(msg));
} else { } else {
let input = downloader let input = downloader

View File

@ -53,7 +53,7 @@ pub async fn spawn(config: Arc<Config>) -> Result<()> {
loop { loop {
hb_client.alive(); hb_client.alive();
let tmp_dir = PathBuf::from("./tmp"); let tmp_dir = PathBuf::from("./tmp");
verbose!("tmp dir reset"); debug!("tmp dir reset");
utils::reset_tmp_dir(&tmp_dir).await?; utils::reset_tmp_dir(&tmp_dir).await?;
config.unique_inputs.sync_pull().await?; config.unique_inputs.sync_pull().await?;
let mut queue = QueueClient::new(config.input_queue.clone()); let mut queue = QueueClient::new(config.input_queue.clone());
@ -72,11 +72,11 @@ pub async fn spawn(config: Arc<Config>) -> Result<()> {
error error
); );
} else { } else {
verbose!("will delete popped message with id = {}", msg.id()); debug!("will delete popped message with id = {}", msg.id());
queue.delete(msg).await?; queue.delete(msg).await?;
verbose!( debug!(
"Attempting to delete {} from the candidate container", "Attempting to delete {} from the candidate container",
input_url.clone() input_url.clone()
); );

View File

@ -89,7 +89,7 @@ async fn process_message(config: Arc<Config>, mut input_queue: QueueClient) -> R
let hb_client = config.common.init_heartbeat().await?; let hb_client = config.common.init_heartbeat().await?;
hb_client.alive(); hb_client.alive();
let tmp_dir = "./tmp"; let tmp_dir = "./tmp";
verbose!("tmp dir reset"); debug!("tmp dir reset");
utils::reset_tmp_dir(tmp_dir).await?; utils::reset_tmp_dir(tmp_dir).await?;
if let Some(msg) = input_queue.pop().await? { if let Some(msg) = input_queue.pop().await? {
@ -105,11 +105,11 @@ async fn process_message(config: Arc<Config>, mut input_queue: QueueClient) -> R
info!("downloaded input to {}", input_path.display()); info!("downloaded input to {}", input_path.display());
sync_and_merge(config.clone(), vec![tmp_dir], true, true).await?; sync_and_merge(config.clone(), vec![tmp_dir], true, true).await?;
verbose!("will delete popped message with id = {}", msg.id()); debug!("will delete popped message with id = {}", msg.id());
input_queue.delete(msg).await?; input_queue.delete(msg).await?;
verbose!( debug!(
"Attempting to delete {} from the candidate container", "Attempting to delete {} from the candidate container",
input_url.clone() input_url.clone()
); );

View File

@ -224,7 +224,7 @@ pub async fn monitor_reports(
no_crash: &Option<SyncedDir>, no_crash: &Option<SyncedDir>,
) -> Result<()> { ) -> Result<()> {
if unique_reports.is_none() && reports.is_none() && no_crash.is_none() { if unique_reports.is_none() && reports.is_none() && no_crash.is_none() {
verbose!("no report directories configured"); debug!("no report directories configured");
return Ok(()); return Ok(());
} }

View File

@ -198,7 +198,7 @@ impl<'a> GenericReportProcessor<'a> {
#[async_trait] #[async_trait]
impl<'a> Processor for GenericReportProcessor<'a> { impl<'a> Processor for GenericReportProcessor<'a> {
async fn process(&mut self, url: Option<Url>, input: &Path) -> Result<()> { async fn process(&mut self, url: Option<Url>, input: &Path) -> Result<()> {
verbose!("generating crash report for: {}", input.display()); debug!("generating crash report for: {}", input.display());
let report = self.test_input(url, input).await?; let report = self.test_input(url, input).await?;
report report
.save( .save(

View File

@ -195,7 +195,7 @@ impl AsanProcessor {
#[async_trait] #[async_trait]
impl Processor for AsanProcessor { impl Processor for AsanProcessor {
async fn process(&mut self, url: Option<Url>, input: &Path) -> Result<()> { async fn process(&mut self, url: Option<Url>, input: &Path) -> Result<()> {
verbose!("processing libfuzzer crash url:{:?} path:{:?}", url, input); debug!("processing libfuzzer crash url:{:?} path:{:?}", url, input);
let report = self.test_input(url, input).await?; let report = self.test_input(url, input).await?;
report report
.save( .save(

View File

@ -41,12 +41,12 @@ pub async fn reset_tmp_dir(tmp_dir: impl AsRef<Path>) -> Result<()> {
if dir_exists { if dir_exists {
fs::remove_dir_all(tmp_dir).await?; fs::remove_dir_all(tmp_dir).await?;
verbose!("deleted {}", tmp_dir.display()); debug!("deleted {}", tmp_dir.display());
} }
fs::create_dir_all(tmp_dir).await?; fs::create_dir_all(tmp_dir).await?;
verbose!("created {}", tmp_dir.display()); debug!("created {}", tmp_dir.display());
Ok(()) Ok(())
} }

View File

@ -76,7 +76,7 @@ impl Agent {
let done = self.update().await?; let done = self.update().await?;
if done { if done {
verbose!("agent done, exiting loop"); debug!("agent done, exiting loop");
break; break;
} }
} }
@ -123,7 +123,7 @@ impl Agent {
let msg = self.work_queue.poll().await?; let msg = self.work_queue.poll().await?;
let next = if let Some(msg) = msg { let next = if let Some(msg) = msg {
verbose!("received work set message: {:?}", msg); debug!("received work set message: {:?}", msg);
let can_schedule = self.coordinator.can_schedule(&msg.work_set).await?; let can_schedule = self.coordinator.can_schedule(&msg.work_set).await?;
@ -169,7 +169,7 @@ impl Agent {
// Otherwise, the work was not stopped, but we still should not execute it. This is likely // Otherwise, the work was not stopped, but we still should not execute it. This is likely
// because our agent version is out of date. Do nothing, so another node can see the work. // because our agent version is out of date. Do nothing, so another node can see the work.
// The service will eventually send us a stop command and reimage our node, if appropriate. // The service will eventually send us a stop command and reimage our node, if appropriate.
verbose!( debug!(
"not scheduling active work set, not dropping: {:?}", "not scheduling active work set, not dropping: {:?}",
msg.work_set msg.work_set
); );
@ -187,7 +187,7 @@ impl Agent {
} }
async fn setting_up(&mut self, state: State<SettingUp>) -> Result<Scheduler> { async fn setting_up(&mut self, state: State<SettingUp>) -> Result<Scheduler> {
verbose!("agent setting up"); debug!("agent setting up");
let tasks = state.work_set().task_ids(); let tasks = state.work_set().task_ids();
self.emit_state_update_if_changed(StateUpdateEvent::SettingUp { tasks }) self.emit_state_update_if_changed(StateUpdateEvent::SettingUp { tasks })
@ -203,7 +203,7 @@ impl Agent {
} }
async fn pending_reboot(&mut self, state: State<PendingReboot>) -> Result<Scheduler> { async fn pending_reboot(&mut self, state: State<PendingReboot>) -> Result<Scheduler> {
verbose!("agent pending reboot"); debug!("agent pending reboot");
self.emit_state_update_if_changed(StateUpdateEvent::Rebooting) self.emit_state_update_if_changed(StateUpdateEvent::Rebooting)
.await?; .await?;
@ -215,7 +215,7 @@ impl Agent {
} }
async fn ready(&mut self, state: State<Ready>) -> Result<Scheduler> { async fn ready(&mut self, state: State<Ready>) -> Result<Scheduler> {
verbose!("agent ready"); debug!("agent ready");
self.emit_state_update_if_changed(StateUpdateEvent::Ready) self.emit_state_update_if_changed(StateUpdateEvent::Ready)
.await?; .await?;
Ok(state.run().await?.into()) Ok(state.run().await?.into())
@ -237,7 +237,7 @@ impl Agent {
} }
async fn done(&mut self, state: State<Done>) -> Result<Scheduler> { async fn done(&mut self, state: State<Done>) -> Result<Scheduler> {
verbose!("agent done"); debug!("agent done");
set_done_lock().await?; set_done_lock().await?;
let event = match state.cause() { let event = match state.cause() {
@ -263,7 +263,7 @@ impl Agent {
let cmd = self.coordinator.poll_commands().await?; let cmd = self.coordinator.poll_commands().await?;
if let Some(cmd) = cmd { if let Some(cmd) = cmd {
verbose!("agent received node command: {:?}", cmd); debug!("agent received node command: {:?}", cmd);
self.scheduler()?.execute_command(cmd).await?; self.scheduler()?.execute_command(cmd).await?;
} }

View File

@ -44,7 +44,7 @@ pub async fn add_ssh_key(key_info: SshKeyInfo) -> Result<()> {
.await?; .await?;
} }
verbose!("removing Authenticated Users permissions from administrators_authorized_keys"); debug!("removing Authenticated Users permissions from administrators_authorized_keys");
let result = Command::new("icacls.exe") let result = Command::new("icacls.exe")
.arg(&admin_auth_keys_path) .arg(&admin_auth_keys_path)
.arg("/remove") .arg("/remove")
@ -65,7 +65,7 @@ pub async fn add_ssh_key(key_info: SshKeyInfo) -> Result<()> {
); );
} }
verbose!("removing inheritance"); debug!("removing inheritance");
let result = Command::new("icacls.exe") let result = Command::new("icacls.exe")
.arg(&admin_auth_keys_path) .arg(&admin_auth_keys_path)
.arg("/inheritance:r") .arg("/inheritance:r")
@ -85,7 +85,7 @@ pub async fn add_ssh_key(key_info: SshKeyInfo) -> Result<()> {
); );
} }
verbose!("copying ACL from ssh_host_dsa_key"); debug!("copying ACL from ssh_host_dsa_key");
let result = Command::new("powershell.exe") let result = Command::new("powershell.exe")
.args(&["-ExecutionPolicy", "Unrestricted", "-Command"]) .args(&["-ExecutionPolicy", "Unrestricted", "-Command"])
.arg(format!( .arg(format!(
@ -132,11 +132,11 @@ pub async fn add_ssh_key(key_info: SshKeyInfo) -> Result<()> {
let mut ssh_path = home_path.join(".ssh"); let mut ssh_path = home_path.join(".ssh");
if !ssh_path.exists() { if !ssh_path.exists() {
verbose!("creating ssh directory: {}", ssh_path.display()); debug!("creating ssh directory: {}", ssh_path.display());
fs::create_dir_all(&ssh_path).await?; fs::create_dir_all(&ssh_path).await?;
} }
verbose!("setting ssh permissions"); debug!("setting ssh permissions");
let result = Command::new("chmod") let result = Command::new("chmod")
.arg("700") .arg("700")
.arg(&ssh_path) .arg(&ssh_path)
@ -160,7 +160,7 @@ pub async fn add_ssh_key(key_info: SshKeyInfo) -> Result<()> {
.await?; .await?;
} }
verbose!("setting authorized_keys permissions"); debug!("setting authorized_keys permissions");
let result = Command::new("chmod") let result = Command::new("chmod")
.arg("600") .arg("600")
.arg(&ssh_path) .arg(&ssh_path)

View File

@ -237,12 +237,12 @@ impl Coordinator {
let mut response = self.client.execute(request).await?; let mut response = self.client.execute(request).await?;
if response.status() == StatusCode::UNAUTHORIZED { if response.status() == StatusCode::UNAUTHORIZED {
verbose!("access token expired, renewing"); debug!("access token expired, renewing");
// If we didn't succeed due to authorization, refresh our token, // If we didn't succeed due to authorization, refresh our token,
self.token = self.registration.config.credentials.access_token().await?; self.token = self.registration.config.credentials.access_token().await?;
verbose!("retrying request after refreshing access token"); debug!("retrying request after refreshing access token");
// And try one more time. // And try one more time.
let request = self.build_request(request_type)?; let request = self.build_request(request_type)?;
@ -322,7 +322,7 @@ impl Coordinator {
task_id, task_id,
}; };
verbose!("checking if able to schedule task ID = {}", task_id); debug!("checking if able to schedule task ID = {}", task_id);
let mut url = self.registration.config.onefuzz_url.clone(); let mut url = self.registration.config.onefuzz_url.clone();
url.set_path("/api/agents/can_schedule"); url.set_path("/api/agents/can_schedule");

View File

@ -89,7 +89,7 @@ fn licenses() -> Result<()> {
fn run(opt: RunOpt) -> Result<()> { fn run(opt: RunOpt) -> Result<()> {
if done::is_agent_done()? { if done::is_agent_done()? {
verbose!( debug!(
"agent is done, remove lock ({}) to continue", "agent is done, remove lock ({}) to continue",
done::done_path()?.display() done::done_path()?.display()
); );
@ -187,10 +187,10 @@ async fn run_agent(config: StaticConfig) -> Result<()> {
} }
} }
}; };
verbose!("current registration: {:?}", registration); debug!("current registration: {:?}", registration);
let mut coordinator = coordinator::Coordinator::new(registration.clone()).await?; let mut coordinator = coordinator::Coordinator::new(registration.clone()).await?;
verbose!("initialized coordinator"); debug!("initialized coordinator");
let mut reboot = reboot::Reboot; let mut reboot = reboot::Reboot;
let reboot_context = reboot.load_context().await?; let reboot_context = reboot.load_context().await?;
@ -198,7 +198,7 @@ async fn run_agent(config: StaticConfig) -> Result<()> {
check_existing_worksets(&mut coordinator).await?; check_existing_worksets(&mut coordinator).await?;
} }
let scheduler = reboot_context.into(); let scheduler = reboot_context.into();
verbose!("loaded scheduler: {}", scheduler); debug!("loaded scheduler: {}", scheduler);
let work_queue = work::WorkQueue::new(registration.clone()); let work_queue = work::WorkQueue::new(registration.clone());

View File

@ -49,7 +49,7 @@ impl Reboot {
.await .await
.with_context(|| format!("unable to save reboot context: {}", path.display()))?; .with_context(|| format!("unable to save reboot context: {}", path.display()))?;
verbose!("reboot context saved"); debug!("reboot context saved");
Ok(()) Ok(())
} }
@ -106,7 +106,7 @@ impl Reboot {
fn wait_for_reboot(&self) -> Result<()> { fn wait_for_reboot(&self) -> Result<()> {
use std::{thread, time}; use std::{thread, time};
verbose!("waiting for reboot"); debug!("waiting for reboot");
// 10 minutes. // 10 minutes.
let d = time::Duration::from_secs(60 * 10); let d = time::Duration::from_secs(60 * 10);

View File

@ -50,7 +50,7 @@ impl SetupRunner {
})?; })?;
az_copy::sync(setup_url.to_string(), &setup_dir, false).await?; az_copy::sync(setup_url.to_string(), &setup_dir, false).await?;
verbose!( debug!(
"synced setup container from {} to {}", "synced setup container from {} to {}",
setup_url, setup_url,
setup_dir.display(), setup_dir.display(),
@ -76,10 +76,9 @@ impl SetupRunner {
let output = setup_script.invoke().await?; let output = setup_script.invoke().await?;
if output.exit_status.success { if output.exit_status.success {
verbose!( debug!(
"setup script succeeded. stdout:{:?} stderr:{:?}", "setup script succeeded. stdout:{:?} stderr:{:?}",
&output.stdout, &output.stdout, &output.stderr,
&output.stderr,
); );
info!("setup script succeeded"); info!("setup script succeeded");
} else { } else {
@ -122,7 +121,7 @@ async fn create_setup_symlink(setup_dir: &Path, work_unit: &WorkUnit) -> Result<
let blocking = spawn_blocking(move || symlink_dir(src, dst)); let blocking = spawn_blocking(move || symlink_dir(src, dst));
blocking.await??; blocking.await??;
verbose!( debug!(
"created symlink from {} to {}", "created symlink from {} to {}",
setup_dir.display(), setup_dir.display(),
task_setup_dir.display(), task_setup_dir.display(),
@ -157,7 +156,7 @@ async fn create_setup_symlink(setup_dir: &Path, work_unit: &WorkUnit) -> Result<
) )
})?; })?;
verbose!( debug!(
"created symlink from {} to {}", "created symlink from {} to {}",
setup_dir.display(), setup_dir.display(),
task_setup_dir.display(), task_setup_dir.display(),

View File

@ -194,7 +194,7 @@ impl IWorkerRunner for WorkerRunner {
async fn run(&mut self, setup_dir: &Path, work: &WorkUnit) -> Result<Box<dyn IWorkerChild>> { async fn run(&mut self, setup_dir: &Path, work: &WorkUnit) -> Result<Box<dyn IWorkerChild>> {
let working_dir = work.working_dir()?; let working_dir = work.working_dir()?;
verbose!("worker working dir = {}", working_dir.display()); debug!("worker working dir = {}", working_dir.display());
fs::create_dir_all(&working_dir).await.with_context(|| { fs::create_dir_all(&working_dir).await.with_context(|| {
format!( format!(
@ -203,7 +203,7 @@ impl IWorkerRunner for WorkerRunner {
) )
})?; })?;
verbose!("created worker working dir: {}", working_dir.display()); debug!("created worker working dir: {}", working_dir.display());
let config_path = work.config_path()?; let config_path = work.config_path()?;
@ -211,7 +211,7 @@ impl IWorkerRunner for WorkerRunner {
.await .await
.with_context(|| format!("unable to save task config: {}", config_path.display()))?; .with_context(|| format!("unable to save task config: {}", config_path.display()))?;
verbose!( debug!(
"wrote worker config to config_path = {}", "wrote worker config to config_path = {}",
config_path.display() config_path.display()
); );

View File

@ -418,7 +418,7 @@ macro_rules! log {
} }
#[macro_export] #[macro_export]
macro_rules! verbose { macro_rules! debug {
($($tt: tt)*) => {{ ($($tt: tt)*) => {{
let msg = format!($($tt)*); let msg = format!($($tt)*);
onefuzz_telemetry::log!(onefuzz_telemetry::Verbose, msg); onefuzz_telemetry::log!(onefuzz_telemetry::Verbose, msg);

View File

@ -107,12 +107,9 @@ pub async fn run_cmd<S: ::std::hash::BuildHasher>(
env: &HashMap<String, String, S>, env: &HashMap<String, String, S>,
timeout: Duration, timeout: Duration,
) -> Result<Output> { ) -> Result<Output> {
verbose!( debug!(
"running command with timeout: cmd:{:?} argv:{:?} env:{:?} timeout:{:?}", "running command with timeout: cmd:{:?} argv:{:?} env:{:?} timeout:{:?}",
program, program, argv, env, timeout
argv,
env,
timeout
); );
let mut cmd = Command::new(program); let mut cmd = Command::new(program);
@ -161,11 +158,11 @@ async fn monitor_stream(name: &str, context: &str, stream: impl AsyncRead + Unpi
} }
async fn wait_process(context: &str, process: Child, stopped: Option<&Notify>) -> Result<()> { async fn wait_process(context: &str, process: Child, stopped: Option<&Notify>) -> Result<()> {
verbose!("waiting for child: {}", context); debug!("waiting for child: {}", context);
let output = process.wait_with_output().await?; let output = process.wait_with_output().await?;
verbose!("child exited. {}:{:?}", context, output.status); debug!("child exited. {}:{:?}", context, output.status);
if let Some(stopped) = stopped { if let Some(stopped) = stopped {
stopped.notify(); stopped.notify();
} }

View File

@ -29,14 +29,14 @@ pub struct SyncedDir {
impl SyncedDir { impl SyncedDir {
pub async fn sync(&self, operation: SyncOperation, delete_dst: bool) -> Result<()> { pub async fn sync(&self, operation: SyncOperation, delete_dst: bool) -> Result<()> {
if self.url.is_none() { if self.url.is_none() {
verbose!("not syncing as SyncedDir is missing remote URL"); debug!("not syncing as SyncedDir is missing remote URL");
return Ok(()); return Ok(());
} }
let dir = &self.path; let dir = &self.path;
let url = self.url.as_ref().unwrap().url(); let url = self.url.as_ref().unwrap().url();
let url = url.as_ref(); let url = url.as_ref();
verbose!("syncing {:?} {}", operation, dir.display()); debug!("syncing {:?} {}", operation, dir.display());
match operation { match operation {
SyncOperation::Push => az_copy::sync(dir, url, delete_dst).await, SyncOperation::Push => az_copy::sync(dir, url, delete_dst).await,
SyncOperation::Pull => az_copy::sync(url, dir, delete_dst).await, SyncOperation::Pull => az_copy::sync(url, dir, delete_dst).await,
@ -85,7 +85,7 @@ impl SyncedDir {
delay_seconds: Option<u64>, delay_seconds: Option<u64>,
) -> Result<()> { ) -> Result<()> {
if self.url.is_none() { if self.url.is_none() {
verbose!("not continuously syncing, as SyncDir does not have a remote URL"); debug!("not continuously syncing, as SyncDir does not have a remote URL");
return Ok(()); return Ok(());
} }
@ -102,7 +102,7 @@ impl SyncedDir {
} }
async fn file_monitor_event(&self, event: Event) -> Result<()> { async fn file_monitor_event(&self, event: Event) -> Result<()> {
verbose!("monitoring {}", self.path.display()); debug!("monitoring {}", self.path.display());
let mut monitor = DirectoryMonitor::new(self.path.clone()); let mut monitor = DirectoryMonitor::new(self.path.clone());
monitor.start()?; monitor.start()?;
@ -137,14 +137,14 @@ impl SyncedDir {
/// a directory, and may reset it. /// a directory, and may reset it.
pub async fn monitor_results(&self, event: Event) -> Result<()> { pub async fn monitor_results(&self, event: Event) -> Result<()> {
loop { loop {
verbose!("waiting to monitor {}", self.path.display()); debug!("waiting to monitor {}", self.path.display());
while fs::metadata(&self.path).await.is_err() { while fs::metadata(&self.path).await.is_err() {
verbose!("dir {} not ready to monitor, delaying", self.path.display()); debug!("dir {} not ready to monitor, delaying", self.path.display());
delay_with_jitter(DELAY).await; delay_with_jitter(DELAY).await;
} }
verbose!("starting monitor for {}", self.path.display()); debug!("starting monitor for {}", self.path.display());
self.file_monitor_event(event.clone()).await?; self.file_monitor_event(event.clone()).await?;
} }
} }
@ -169,7 +169,7 @@ pub async fn continuous_sync(
} }
} }
if !should_loop { if !should_loop {
verbose!("not syncing as SyncDirs do not have remote URLs"); debug!("not syncing as SyncDirs do not have remote URLs");
return Ok(()); return Ok(());
} }