Mostly automated `clippy` fixes.
George Pollard, 2023-01-27 16:00:24 +13:00 (committed by GitHub)
parent e631ac6cd2
commit 00031156bc
71 changed files with 152 additions and 159 deletions
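Most of the hunks below apply one pattern: variables are inlined directly into format strings. This is presumably clippy's `uninlined_format_args` lint, which, if memory serves, was promoted to warn-by-default around the Rust 1.67 cycle, matching the toolchain bump in the first two files; the rewrites look like the output of `cargo clippy --fix`, consistent with the "mostly automated" commit title. A minimal sketch of the before/after shape, using placeholder variables rather than any code from this repository:

```rust
fn main() {
    let blocks = vec![0x10u32, 0x20, 0x30];
    let count = blocks.len();

    // Before: arguments passed positionally after the format string.
    println!("block count = {}", count);
    println!("blocks = {:x?}", blocks);

    // After: same output, with the variables inlined into the string.
    // This is the shape nearly every call site in the diff is converted to.
    println!("block count = {count}");
    println!("blocks = {blocks:x?}");
}
```

The remaining hunks (the Dockerfile/CI toolchain bump, a Cargo.toml edition and dependency reflow, and a few signature changes) appear to be the non-automated remainder of the cleanup.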

View File

@ -5,7 +5,7 @@ ARG VARIANT="ubuntu-22.04"
FROM mcr.microsoft.com/vscode/devcontainers/base:0-${VARIANT}
# note: keep this in sync with .github/workflows/ci.yml
ARG RUSTVERSION="1.66"
ARG RUSTVERSION="1.67"
# Needed for dotnet7; remove when available in Ubuntu
RUN wget https://packages.microsoft.com/config/ubuntu/22.04/packages-microsoft-prod.deb -O packages-microsoft-prod.deb \

View File

@ -44,7 +44,7 @@ jobs:
uses: dtolnay/rust-toolchain@55c7845fad90d0ae8b2e83715cb900e5e861e8cb # pinned latest master as of 2022-10-08
# note: keep this in sync with .devcontainer/Dockerfile
with:
toolchain: 1.66
toolchain: 1.67
components: clippy, rustfmt, llvm-tools-preview
- name: Get Rust version & build version
shell: bash

View File

@ -38,7 +38,7 @@ fn main() -> Result<()> {
let blocks = provider.provide()?;
println!("block count = {}", blocks.len());
println!("blocks = {:x?}", blocks);
println!("blocks = {blocks:x?}");
Ok(())
}

View File

@ -34,7 +34,7 @@ fn main() -> Result<()> {
let mut provider = PeSancovBasicBlockProvider::new(&data, &pe, &mut pdb);
let blocks = provider.provide()?;
println!("blocks = {:x?}", blocks);
println!("blocks = {blocks:x?}");
Ok(())
}

View File

@ -64,7 +64,7 @@ fn main() -> Result<()> {
if opt.cobertura_xml {
let cobertura = coverage_legacy::cobertura::cobertura(src_coverage)?;
println!("{}", cobertura);
println!("{cobertura}");
} else {
for file_coverage in src_coverage.files {
for location in &file_coverage.locations {

View File

@ -151,7 +151,7 @@ impl<'c> Recorder<'c> {
.increment(breakpoint.module, breakpoint.offset);
} else {
let pc = if let Ok(pc) = dbg.read_program_counter() {
format!("{:x}", pc)
format!("{pc:x}")
} else {
"???".into()
};

View File

@ -52,7 +52,7 @@ impl RustcDemangler {
.map_err(|_| format_err!("unable to demangle rustc name"))?;
// Alternate formatter discards trailing hash.
Ok(format!("{:#}", name))
Ok(format!("{name:#}"))
}
}

View File

@ -64,7 +64,7 @@ fn main() -> Result<()> {
if args.dump_stdio {
if let Some(status) = &recorded.output.status {
println!("status = {}", status);
println!("status = {status}");
} else {
println!("status = <unavailable>");
}

View File

@ -102,8 +102,8 @@ impl std::ops::AddAssign for Count {
}
}
pub fn find_coverage_sites<'data>(
module: &dyn Module<'data>,
pub fn find_coverage_sites(
module: &dyn Module,
allowlist: &TargetAllowList,
) -> Result<ModuleBinaryCoverage> {
let debuginfo = module.debuginfo()?;

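A second recurring change, visible in `find_coverage_sites` above and in `sweep_module`/`sweep_region` below, drops an explicit `'data` lifetime parameter from function signatures. From these hunks alone it is not clear whether this comes from clippy (for example `needless_lifetimes`) or from the `Module` trait losing its lifetime parameter elsewhere in the changeset; the sketch below only illustrates the general elision pattern with a hypothetical trait, not the repository's real `Module` API.

```rust
// Hypothetical trait, only to illustrate the signature change.
trait Module {
    fn debuginfo(&self) -> String;
}

// Before: a named lifetime used only on a single reference parameter.
// clippy::needless_lifetimes flags this form as elidable.
fn find_sites_before<'data>(module: &'data dyn Module) -> String {
    module.debuginfo()
}

// After: the lifetime is elided from the signature.
fn find_sites_after(module: &dyn Module) -> String {
    module.debuginfo()
}

fn main() {
    struct Pe;
    impl Module for Pe {
        fn debuginfo(&self) -> String {
            "pe debug info".to_string()
        }
    }
    // Both forms compile to the same thing; clippy only complains about the first.
    println!("{}", find_sites_before(&Pe));
    println!("{}", find_sites_after(&Pe));
}
```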
View File

@ -8,7 +8,7 @@ use std::collections::{BTreeMap, BTreeSet};
use crate::debuginfo::DebugInfo;
use crate::{Module, Offset};
pub fn sweep_module<'data>(module: &dyn Module<'data>, debuginfo: &DebugInfo) -> Result<Blocks> {
pub fn sweep_module(module: &dyn Module, debuginfo: &DebugInfo) -> Result<Blocks> {
let mut blocks = Blocks::default();
for function in debuginfo.functions() {
@ -19,8 +19,8 @@ pub fn sweep_module<'data>(module: &dyn Module<'data>, debuginfo: &DebugInfo) ->
Ok(blocks)
}
pub fn sweep_region<'data>(
module: &dyn Module<'data>,
pub fn sweep_region(
module: &dyn Module,
debuginfo: &DebugInfo,
offset: Offset,
size: u64,

View File

@ -101,7 +101,7 @@ impl Display for DebugStackFrame {
symbol.displacement(),
),
_ => {
write!(formatter, "{}+0x{:x}", module_name, module_offset)
write!(formatter, "{module_name}+0x{module_offset:x}")
}
},
DebugStackFrame::CorruptFrame => formatter.write_str("<corrupt frame(s)>"),
@ -114,7 +114,7 @@ impl Serialize for DebugStackFrame {
where
S: Serializer,
{
serializer.serialize_str(&format!("{}", self))
serializer.serialize_str(&format!("{self}"))
}
}
@ -156,7 +156,7 @@ impl Display for DebugStack {
writeln!(formatter)?;
}
first = false;
write!(formatter, "{}", frame)?;
write!(formatter, "{frame}")?;
}
Ok(())
}

View File

@ -34,7 +34,7 @@ fn main() -> Result<()> {
}
if let Some(path) = &opt.ld_library_path {
println!("setting LD_LIBRARY_PATH = \"{}\"", path);
println!("setting LD_LIBRARY_PATH = \"{path}\"");
cmd.env("LD_LIBRARY_PATH", path);
}
@ -44,7 +44,7 @@ fn main() -> Result<()> {
println!("no missing libraries");
} else {
for lib in missing {
println!("missing library: {:x?}", lib);
println!("missing library: {lib:x?}");
}
}

View File

@ -904,7 +904,7 @@ impl AppVerifierController {
enable_args.arg("-enable");
configure_args.arg("-configure");
for test in app_verifier_tests.iter() {
enable_args.arg(format!("{}", test));
enable_args.arg(format!("{test}"));
for stop_code in stop_codes(AppVerifierTest::from_str(test)?) {
configure_args.arg(format!("0x{:x}", *stop_code));

View File

@ -110,7 +110,7 @@ impl DebuggerResult {
for exception in &self.exceptions {
writeln!(file)?;
writeln!(file, "```")?;
writeln!(file, "{}", exception)?;
writeln!(file, "{exception}")?;
writeln!(file, "```")?;
}
writeln!(file)?;

View File

@ -118,11 +118,11 @@ pub enum ExceptionDescription {
impl fmt::Display for ExceptionDescription {
fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
match self {
ExceptionDescription::GenericException(code) => write!(formatter, "{:?}", code),
ExceptionDescription::VerifierStop(stop) => write!(formatter, "VerifierStop({})", stop),
ExceptionDescription::FastFail(code) => write!(formatter, "FastFail({:?})", code),
ExceptionDescription::Asan(code) => write!(formatter, "{:?}", code),
ExceptionDescription::Rtc(code) => write!(formatter, "{:?}", code),
ExceptionDescription::GenericException(code) => write!(formatter, "{code:?}"),
ExceptionDescription::VerifierStop(stop) => write!(formatter, "VerifierStop({stop})"),
ExceptionDescription::FastFail(code) => write!(formatter, "FastFail({code:?})"),
ExceptionDescription::Asan(code) => write!(formatter, "{code:?}"),
ExceptionDescription::Rtc(code) => write!(formatter, "{code:?}"),
}
}
}
@ -227,7 +227,7 @@ impl fmt::Display for Exception {
writeln!(formatter, " StackHash: {}", self.stack_hash)?;
writeln!(formatter, " Stack:")?;
for frame in &self.stack_frames {
writeln!(formatter, " {}", frame)?;
writeln!(formatter, " {frame}")?;
}
Ok(())
}
@ -267,9 +267,9 @@ impl ExitStatus {
impl fmt::Display for ExitStatus {
fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
match self {
ExitStatus::Code(c) => write!(formatter, "Exit code: {}", c),
ExitStatus::Signal(c) => write!(formatter, "Signal: {}", c),
ExitStatus::Timeout(sec) => write!(formatter, "Timeout: {}s", sec),
ExitStatus::Code(c) => write!(formatter, "Exit code: {c}"),
ExitStatus::Signal(c) => write!(formatter, "Signal: {c}"),
ExitStatus::Timeout(sec) => write!(formatter, "Timeout: {sec}s"),
}
}
}

View File

@ -195,10 +195,10 @@ pub enum VerifierStop {
impl fmt::Display for VerifierStop {
fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
match self {
VerifierStop::Heap(code) => write!(formatter, "{:?}", code),
VerifierStop::Handles(code) => write!(formatter, "{:?}", code),
VerifierStop::Leak(code) => write!(formatter, "{:?}", code),
VerifierStop::Exception(code) => write!(formatter, "{:?}", code),
VerifierStop::Heap(code) => write!(formatter, "{code:?}"),
VerifierStop::Handles(code) => write!(formatter, "{code:?}"),
VerifierStop::Leak(code) => write!(formatter, "{code:?}"),
VerifierStop::Exception(code) => write!(formatter, "{code:?}"),
VerifierStop::Unknown => write!(formatter, "Unknown"),
}
}

View File

@ -145,7 +145,7 @@ impl Tester {
) -> Result<(Summary, Vec<TestResult>)> {
let threads = max_cores.unwrap_or_else(num_cpus::get);
let threadpool = ThreadPoolBuilder::new()
.thread_name(|idx| format!("{}-{}", THREAD_POOL_NAME, idx))
.thread_name(|idx| format!("{THREAD_POOL_NAME}-{idx}"))
.num_threads(threads)
.build()?;
@ -397,7 +397,7 @@ impl Tester {
if let Some(appverif_controller) = &self.appverif_controller {
appverif_controller
.set(state)
.with_context(|| format!("Setting appverifier to {:?}", state))?;
.with_context(|| format!("Setting appverifier to {state:?}"))?;
}
Ok(())

View File

@ -29,8 +29,8 @@ fn read_file(filename: &str) -> Result<String, Box<dyn Error>> {
}
fn print_values(version: &str, sha: &str) {
println!("cargo:rustc-env=ONEFUZZ_VERSION={}", version);
println!("cargo:rustc-env=GIT_VERSION={}", sha);
println!("cargo:rustc-env=ONEFUZZ_VERSION={version}");
println!("cargo:rustc-env=GIT_VERSION={sha}");
}
fn print_version(include_sha: bool, include_local: bool, sha: &str) -> Result<(), Box<dyn Error>> {

View File

@ -236,7 +236,7 @@ impl DynamicConfig {
fn save_path(machine_id: Uuid) -> Result<PathBuf> {
Ok(onefuzz::fs::onefuzz_root()?
.join("etc")
.join(format!("dynamic-config-{}.json", machine_id)))
.join(format!("dynamic-config-{machine_id}.json")))
}
}

View File

@ -127,7 +127,7 @@ fn into_envelope(event: NodeEvent) -> NodeEventEnvelope {
fn print_json(data: impl serde::Serialize) -> Result<()> {
let json = serde_json::to_string_pretty(&data)?;
println!("{}", json);
println!("{json}");
Ok(())
}
@ -172,7 +172,7 @@ fn debug_run_worker(opt: RunWorkerOpt) -> Result<()> {
let events = rt.block_on(run_worker(work_set))?;
for event in events {
println!("{:?}", event);
println!("{event:?}");
}
Ok(())

View File

@ -32,5 +32,5 @@ pub fn is_agent_done(machine_id: Uuid) -> Result<bool> {
}
pub fn done_path(machine_id: Uuid) -> Result<PathBuf> {
Ok(onefuzz_root()?.join(format!("supervisor-is-done-{}", machine_id)))
Ok(onefuzz_root()?.join(format!("supervisor-is-done-{machine_id}")))
}

View File

@ -7,13 +7,13 @@ use std::{
use uuid::Uuid;
pub fn failure_path(machine_id: Uuid) -> Result<PathBuf> {
Ok(onefuzz_root()?.join(format!("onefuzz-agent-failure-{}.txt", machine_id)))
Ok(onefuzz_root()?.join(format!("onefuzz-agent-failure-{machine_id}.txt")))
}
pub fn save_failure(err: &Error, machine_id: Uuid) -> Result<()> {
error!("saving failure: {:?}", err);
let path = failure_path(machine_id)?;
let message = format!("{:?}", err);
let message = format!("{err:?}");
fs::write(&path, message)
.with_context(|| format!("unable to write failure log: {}", path.display()))
}

View File

@ -113,9 +113,9 @@ fn redirect(opt: RunOpt) -> Result<()> {
let run_id = Uuid::new_v4();
let stdout_path = log_path.join(format!("{}-stdout.txt", run_id));
let stderr_path = log_path.join(format!("{}-stdout.txt", run_id));
let failure_path = log_path.join(format!("{}-failure.txt", run_id));
let stdout_path = log_path.join(format!("{run_id}-stdout.txt"));
let stderr_path = log_path.join(format!("{run_id}-stdout.txt"));
let failure_path = log_path.join(format!("{run_id}-failure.txt"));
info!(
"saving output to files: {} {} {}",
@ -168,10 +168,7 @@ fn redirect(opt: RunOpt) -> Result<()> {
.append(true)
.open(failure_path)
.context("unable to open log file")?;
log.write_fmt(format_args!(
"onefuzz-agent child failed: {:?}",
exit_status
))?;
log.write_fmt(format_args!("onefuzz-agent child failed: {exit_status:?}"))?;
bail!("onefuzz-agent child failed: {:?}", exit_status);
}
@ -226,7 +223,7 @@ async fn load_config(opt: RunOpt) -> Result<StaticConfig> {
let machine_identity = opt_machine_id.map(|machine_id| MachineIdentity {
machine_id,
machine_name: opt_machine_name.unwrap_or(format!("{}", machine_id)),
machine_name: opt_machine_name.unwrap_or(format!("{machine_id}")),
scaleset_name: None,
});
@ -248,16 +245,13 @@ async fn check_existing_worksets(coordinator: &mut coordinator::Coordinator) ->
if let Some(work) = WorkSet::load_from_fs_context(coordinator.get_machine_id()).await? {
warn!("onefuzz-agent unexpectedly identified an existing workset on start");
let failure = match failure::read_failure(coordinator.get_machine_id()) {
Ok(value) => format!("onefuzz-agent failed: {}", value),
Ok(value) => format!("onefuzz-agent failed: {value}"),
Err(failure_err) => {
warn!("unable to read failure: {:?}", failure_err);
let logs = failure::read_logs().unwrap_or_else(|logs_err| {
format!(
"unable to read failure message or logs: {:?} {:?}",
failure_err, logs_err
)
format!("unable to read failure message or logs: {failure_err:?} {logs_err:?}")
});
format!("onefuzz-agent failed: {}", logs)
format!("onefuzz-agent failed: {logs}")
}
};

View File

@ -135,7 +135,7 @@ impl RebootContext {
}
fn reboot_context_path(machine_id: Uuid) -> Result<PathBuf> {
Ok(onefuzz::fs::onefuzz_root()?.join(format!("reboot_context_{}.json", machine_id)))
Ok(onefuzz::fs::onefuzz_root()?.join(format!("reboot_context_{machine_id}.json")))
}
#[cfg(test)]

View File

@ -46,7 +46,7 @@ impl fmt::Display for Scheduler {
Self::Busy(..) => "Scheduler::Busy",
Self::Done(..) => "Scheduler::Done",
};
write!(f, "{}", s)
write!(f, "{s}")
}
}
@ -213,7 +213,7 @@ impl State<SettingUp> {
// No script was executed.
}
Err(err) => {
let error = format!("{:?}", err);
let error = format!("{err:?}");
warn!("{}", error);
let cause = DoneCause::SetupError {
error,

View File

@ -32,7 +32,7 @@ impl WorkSet {
}
pub fn context_path(machine_id: Uuid) -> Result<PathBuf> {
Ok(onefuzz::fs::onefuzz_root()?.join(format!("workset_context-{}.json", machine_id)))
Ok(onefuzz::fs::onefuzz_root()?.join(format!("workset_context-{machine_id}.json")))
}
pub async fn load_from_fs_context(machine_id: Uuid) -> Result<Option<Self>> {
@ -109,7 +109,7 @@ pub struct WorkUnit {
impl WorkUnit {
pub fn working_dir(&self, machine_id: Uuid) -> Result<PathBuf> {
Ok(onefuzz::fs::onefuzz_root()?
.join(format!("{}", machine_id))
.join(format!("{machine_id}"))
.join(self.task_id.to_string()))
}

View File

@ -77,7 +77,7 @@ mod line_number {
where
S: Serializer,
{
let s = format!("{}", val);
let s = format!("{val}");
serializer.serialize_str(&s)
}

View File

@ -10,7 +10,7 @@ pub fn serialize<S>(val: &u64, serializer: S) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
let s = format!("{:x}", val);
let s = format!("{val:x}");
serializer.serialize_str(&s)
}

View File

@ -29,8 +29,8 @@ fn read_file(filename: &str) -> Result<String, Box<dyn Error>> {
}
fn print_values(version: &str, sha: &str) {
println!("cargo:rustc-env=ONEFUZZ_VERSION={}", version);
println!("cargo:rustc-env=GIT_VERSION={}", sha);
println!("cargo:rustc-env=ONEFUZZ_VERSION={version}");
println!("cargo:rustc-env=GIT_VERSION={sha}");
}
fn print_version(include_sha: bool, include_local: bool, sha: &str) -> Result<(), Box<dyn Error>> {

View File

@ -43,7 +43,7 @@ pub async fn run(args: clap::ArgMatches<'static>) -> Result<()> {
let (cmd, sub_args) = args.subcommand();
let command =
Commands::from_str(cmd).with_context(|| format!("unexpected subcommand: {}", cmd))?;
Commands::from_str(cmd).with_context(|| format!("unexpected subcommand: {cmd}"))?;
let sub_args = sub_args
.ok_or_else(|| anyhow!("missing subcommand arguments"))?

View File

@ -177,7 +177,7 @@ pub fn get_synced_dirs(
let remote_path = path.absolutize()?;
let remote_url = Url::from_file_path(remote_path).expect("invalid file path");
let remote_blob_url = BlobContainerUrl::new(remote_url).expect("invalid url");
let path = current_dir.join(format!("{}/{}/{}_{}", job_id, task_id, name, index));
let path = current_dir.join(format!("{job_id}/{task_id}/{name}_{index}"));
Ok(SyncedDir {
remote_path: Some(remote_blob_url),
local_path: path,
@ -203,7 +203,7 @@ pub fn get_synced_dir(
let remote_url =
Url::from_file_path(remote_path).map_err(|_| anyhow!("invalid file path"))?;
let remote_blob_url = BlobContainerUrl::new(remote_url)?;
let path = std::env::current_dir()?.join(format!("{}/{}/{}", job_id, task_id, name));
let path = std::env::current_dir()?.join(format!("{job_id}/{task_id}/{name}"));
Ok(SyncedDir {
remote_path: Some(remote_blob_url),
local_path: path,
@ -263,7 +263,7 @@ pub async fn build_local_context(
min_available_memory_mb: 0,
};
let current_dir = current_dir()?;
let job_path = current_dir.join(format!("{}", job_id));
let job_path = current_dir.join(format!("{job_id}"));
Ok(LocalContext {
job_path,
common_config,

View File

@ -378,7 +378,7 @@ impl TerminalUi {
),
Span::raw(" "),
Span::styled(
format!("{}", count),
format!("{count}"),
Style::default().add_modifier(Modifier::BOLD),
),
Span::raw(", "),
@ -411,7 +411,7 @@ impl TerminalUi {
};
ListItem::new(Spans::from(vec![
Span::styled(format!("{:<9}", level), style),
Span::styled(format!("{level:<9}"), style),
Span::raw(" "),
Span::raw(log),
]))

View File

@ -158,7 +158,7 @@ async fn poll_inputs(
if let Some(message) = input_queue.pop().await? {
let input_url = message
.parse(|data| BlobUrl::parse(str::from_utf8(data)?))
.with_context(|| format!("unable to parse URL from queue: {:?}", message))?;
.with_context(|| format!("unable to parse URL from queue: {message:?}"))?;
if !already_checked(config, &input_url).await? {
let destination_path = _copy(input_url, &tmp_dir).await?;
@ -252,10 +252,10 @@ pub async fn run_tool(
info!("analyzing input with {:?}", cmd);
let output = cmd
.spawn()
.with_context(|| format!("analyzer failed to start: {}", analyzer_path))?;
.with_context(|| format!("analyzer failed to start: {analyzer_path}"))?;
monitor_process(output, "analyzer".to_string(), true, None)
.await
.with_context(|| format!("analyzer failed to run: {}", analyzer_path))?;
.with_context(|| format!("analyzer failed to run: {analyzer_path}"))?;
Ok(())
}

View File

@ -308,7 +308,7 @@ impl<'a> TaskContext<'a> {
let id = Uuid::new_v4();
let output_file_path =
intermediate_coverage_files_path(self.config.coverage.local_path.as_path())?
.join(format!("{}.cobertura.xml", id));
.join(format!("{id}.cobertura.xml"));
let target_options = expand.evaluate(&self.config.target_options)?;

View File

@ -208,10 +208,10 @@ impl GeneratorTask {
info!("Generating test cases with {:?}", generator);
let output = generator
.spawn()
.with_context(|| format!("generator failed to start: {}", generator_path))?;
.with_context(|| format!("generator failed to start: {generator_path}"))?;
monitor_process(output, "generator".to_string(), true, None)
.await
.with_context(|| format!("generator failed to run: {}", generator_path))?;
.with_context(|| format!("generator failed to run: {generator_path}"))?;
Ok(())
}

View File

@ -269,7 +269,7 @@ async fn start_supervisor(
info!("starting supervisor '{:?}'", cmd);
let child = cmd
.spawn()
.with_context(|| format!("supervisor failed to start: {:?}", cmd))?;
.with_context(|| format!("supervisor failed to start: {cmd:?}"))?;
Ok(child)
}

View File

@ -81,7 +81,7 @@ impl<'a, M> fmt::Display for Event<'a, M> {
impl<'a, M> fmt::Debug for Event<'a, M> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}", self)
write!(f, "{self}")
}
}

View File

@ -104,7 +104,7 @@ fn fixture() -> InputPoller<Msg> {
}
fn url_fixture(msg: Msg) -> Url {
Url::parse(&format!("https://azure.com/c/{}", msg)).unwrap()
Url::parse(&format!("https://azure.com/c/{msg}")).unwrap()
}
fn input_fixture(dir: &Path, msg: Msg) -> PathBuf {

View File

@ -142,7 +142,7 @@ pub async fn handle_crash_reports(
let original_crash_test_result = parse_report_file(file.path())
.await
.with_context(|| format!("unable to parse crash report: {}", file_name))?;
.with_context(|| format!("unable to parse crash report: {file_name}"))?;
let input_blob = match &original_crash_test_result {
CrashTestResult::CrashReport(x) => x.input_blob.clone(),

View File

@ -170,7 +170,7 @@ pub async fn test_input(args: TestInputArgs<'_>) -> Result<CrashTestResult> {
task_id,
job_id,
tries: 1 + args.check_retry_count,
error: test_report.error.map(|e| format!("{}", e)),
error: test_report.error.map(|e| format!("{e}")),
};
Ok(CrashTestResult::NoRepro(Box::new(no_repro)))

View File

@ -174,7 +174,7 @@ pub async fn test_input(args: TestInputArgs<'_>) -> Result<CrashTestResult> {
task_id,
job_id,
tries: 1 + args.check_retry_count,
error: test_report.error.map(|e| format!("{}", e)),
error: test_report.error.map(|e| format!("{e}")),
};
Ok(CrashTestResult::NoRepro(Box::new(no_repro)))

View File

@ -67,7 +67,7 @@ impl BlobLogWriter {
max_log_size: u64,
) -> Result<Self> {
let container_client = TaskLogger::create_container_client(&log_container)?;
let prefix = format!("{}/{}", task_id, machine_id);
let prefix = format!("{task_id}/{machine_id}");
let pages: Vec<ListBlobsResponse> = container_client
.list_blobs()
.prefix(prefix.clone())
@ -100,7 +100,7 @@ impl BlobLogWriter {
let blob_id = match blob_ids.into_iter().last() {
Some(id) => id,
None => {
let blob_client = container_client.blob_client(format!("{}/1.log", prefix));
let blob_client = container_client.blob_client(format!("{prefix}/1.log"));
blob_client
.put_append_blob()
.await
@ -135,7 +135,7 @@ impl LogWriter<BlobLogWriter> for BlobLogWriter {
.data
.iter()
.map(|p| p.as_values())
.map(|(name, val)| format!("{} {}", name, val))
.map(|(name, val)| format!("{name} {val}"))
.collect::<Vec<_>>()
.join(", ")
)
@ -353,8 +353,7 @@ impl TaskLogger {
timestamp: chrono::Utc::now(),
level: log::Level::Info,
message: format!(
"onefuzz task logger: Skipped {} traces/events",
skipped_messages_count
"onefuzz task logger: Skipped {skipped_messages_count} traces/events"
),
};
@ -591,9 +590,9 @@ mod tests {
let x = events.read().unwrap();
for (k, values) in x.iter() {
println!("{}", k);
println!("{k}");
for v in values {
println!(" {:?}", v);
println!(" {v:?}");
}
}
@ -649,9 +648,9 @@ mod tests {
let x = events.read().unwrap();
for (k, values) in x.iter() {
println!("{}", k);
println!("{k}");
for v in values {
println!(" {:?}", v);
println!(" {v:?}");
}
}

View File

@ -552,7 +552,7 @@ pub fn format_events(events: &[EventData]) -> String {
events
.iter()
.map(|x| x.as_values())
.map(|(x, y)| format!("{}:{}", x, y))
.map(|(x, y)| format!("{x}:{y}"))
.collect::<Vec<String>>()
.join(" ")
}

View File

@ -1,5 +1,5 @@
fn main() {
let bytes = onefuzz::memory::available_bytes().unwrap();
let gb = (bytes as f64) * 1e-9;
println!("available bytes: {} ({:.1} GB)", bytes, gb);
println!("available bytes: {bytes} ({gb:.1} GB)");
}

View File

@ -80,7 +80,7 @@ async fn main() -> Result<()> {
let text: &str = crash.text.as_deref().unwrap_or_default();
println!(" sanitizer = {}", crash.sanitizer);
println!(" summary = {}", crash.summary);
println!(" text = {}", text);
println!(" text = {text}");
} else {
println!("[-] no crash detected.");
}
@ -88,7 +88,7 @@ async fn main() -> Result<()> {
println!();
println!("[+] verbose test result:");
println!();
println!("{:?}", test_result);
println!("{test_result:?}");
Ok(())
}

View File

@ -27,10 +27,10 @@ pub fn add_asan_log_env<S: BuildHasher>(env: &mut HashMap<String, String, S>, as
let re = regex::Regex::new(r"^(?P<d>[a-zA-Z]):\\").expect("static regex parse failed");
let network_path = re.replace(&asan_path_as_str, "\\\\127.0.0.1\\$d$\\");
if let Some(v) = env.get_mut("ASAN_OPTIONS") {
let log_path = format!(":log_path={}", network_path);
let log_path = format!(":log_path={network_path}");
v.push_str(&log_path);
} else {
let log_path = format!("log_path={}", network_path);
let log_path = format!("log_path={network_path}");
env.insert("ASAN_OPTIONS".to_string(), log_path);
}
}
@ -40,10 +40,10 @@ pub fn add_asan_log_env<S: BuildHasher>(env: &mut HashMap<String, String, S>, as
let asan_path = asan_dir.join("asan-log");
let asan_path_as_str = asan_path.to_string_lossy();
if let Some(v) = env.get_mut("ASAN_OPTIONS") {
let log_path = format!(":log_path={}", asan_path_as_str);
let log_path = format!(":log_path={asan_path_as_str}");
v.push_str(&log_path);
} else {
let log_path = format!("log_path={}", asan_path_as_str);
let log_path = format!("log_path={asan_path_as_str}");
env.insert("ASAN_OPTIONS".to_string(), log_path);
}
}

View File

@ -113,11 +113,11 @@ impl ClientCredentials {
pub async fn access_token(&self) -> Result<AccessToken> {
let (authority, scope) = {
let url = Url::parse(&self.resource.clone())?;
let port = url.port().map(|p| format!(":{}", p)).unwrap_or_default();
let port = url.port().map(|p| format!(":{p}")).unwrap_or_default();
let host_name = url.host_str().ok_or_else(|| {
anyhow::format_err!("resource URL does not have a host string: {}", url)
})?;
let host = format!("{}{}", host_name, port);
let host = format!("{host_name}{port}");
if let Some(domain) = &self.multi_tenant_domain {
let instance: Vec<&str> = host.split('.').collect();
(
@ -125,7 +125,7 @@ impl ClientCredentials {
format!("api://{}/{}/", &domain, instance[0]),
)
} else {
(self.tenant.clone(), format!("api://{}/", host))
(self.tenant.clone(), format!("api://{host}/"))
}
};
@ -141,7 +141,7 @@ impl ClientCredentials {
("client_secret", self.client_secret.expose_ref().to_string()),
("grant_type", "client_credentials".into()),
("tenant", authority),
("scope", format!("{}.default", scope)),
("scope", format!("{scope}.default")),
])
.send_retry_default()
.await
@ -191,7 +191,7 @@ impl ManagedIdentityCredentials {
let instance: Vec<&str> = host.split('.').collect();
format!("api://{}/{}", domain, instance[0])
} else {
format!("api://{}", host)
format!("api://{host}")
}
};

View File

@ -41,7 +41,7 @@ impl fmt::Display for Mode {
Mode::Copy => "copy",
Mode::Sync => "sync",
};
write!(f, "{}", as_str)
write!(f, "{as_str}")
}
}
@ -99,7 +99,7 @@ async fn az_impl(mode: Mode, src: &OsStr, dst: &OsStr, args: &[&str]) -> Result<
let stderr = String::from_utf8_lossy(&output.stderr);
let logfile = read_azcopy_log_file(temp_dir.path())
.await
.unwrap_or_else(|e| format!("unable to read azcopy log file from: {:?}", e));
.unwrap_or_else(|e| format!("unable to read azcopy log file from: {e:?}"));
let src = redact_azcopy_sas_arg(src);
let dst = redact_azcopy_sas_arg(dst);
@ -121,7 +121,7 @@ async fn az_impl(mode: Mode, src: &OsStr, dst: &OsStr, args: &[&str]) -> Result<
// Work around issues where azcopy fails with an error we should consider
// "acceptable" to always retry on.
fn should_always_retry(err: &anyhow::Error) -> bool {
let as_string = format!("{:?}", err);
let as_string = format!("{err:?}");
for value in ALWAYS_RETRY_ERROR_STRINGS {
if as_string.contains(value) {
info!(
@ -183,7 +183,7 @@ async fn retry_az_impl(mode: Mode, src: &OsStr, dst: &OsStr, args: &[&str]) -> R
},
)
.await
.with_context(|| format!("azcopy failed after retrying. mode: {}", mode))?;
.with_context(|| format!("azcopy failed after retrying. mode: {mode}"))?;
Ok(())
}

View File

@ -95,7 +95,7 @@ impl BlobClient {
let file_stream = codec::FramedRead::new(reader, codec).map_ok(bytes::BytesMut::freeze);
let body = reqwest::Body::wrap_stream(file_stream);
let content_length = format!("{}", file_len);
let content_length = format!("{file_len}");
self.put(file_url)
.header("Content-Length", &content_length)

View File

@ -28,8 +28,7 @@ impl BlobUrl {
pub fn from_blob_info(account: &str, container: &str, name: &str) -> Result<Self> {
// format https://docs.microsoft.com/en-us/rest/api/storageservices/naming-and-referencing-containers--blobs--and-metadata#resource-uri-syntax
let url = Url::parse(&format!(
"https://{}.blob.core.windows.net/{}/{}",
account, container, name
"https://{account}.blob.core.windows.net/{container}/{name}"
))?;
Self::new(url)
}
@ -202,9 +201,9 @@ impl fmt::Debug for BlobContainerUrl {
impl fmt::Display for BlobContainerUrl {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
if let Some(file_path) = self.as_file_path() {
write!(f, "{:?}", file_path)
write!(f, "{file_path:?}")
} else if let (Some(account), Some(container)) = (self.account(), self.container()) {
write!(f, "{}:{}", account, container)
write!(f, "{account}:{container}")
} else {
panic!("invalid blob url")
}
@ -426,7 +425,7 @@ mod tests {
#[test]
fn test_blob_url() {
for url in invalid_blob_urls() {
println!("{:?}", url);
println!("{url:?}");
assert!(BlobUrl::new(url).is_err());
}

View File

@ -350,7 +350,7 @@ impl<'a> Expand<'a> {
let path = String::from(
dunce::canonicalize(v)
.with_context(|| {
format!("unable to canonicalize path during extension: {}", v)
format!("unable to canonicalize path during extension: {v}")
})?
.to_string_lossy(),
);
@ -390,7 +390,7 @@ impl<'a> Expand<'a> {
(true, Some(ev)) => {
arg = self
.replace_value(fmtstr, arg.clone(), ev)
.with_context(|| format!("replace_value failed: {} {}", fmtstr, arg))?
.with_context(|| format!("replace_value failed: {fmtstr} {arg}"))?
}
(true, None) => bail!("missing argument {}", fmtstr),
(false, _) => (),
@ -438,8 +438,7 @@ mod tests {
let expected = vec!["a b c"];
assert_eq!(
result, expected,
"result: {:?} expected: {:?}",
result, expected
"result: {result:?} expected: {expected:?}"
);
Ok(())
}
@ -489,8 +488,7 @@ mod tests {
let input_full_path = dunce::canonicalize(input_path).context("canonicalize failed")?;
let expected_input = input_full_path.to_string_lossy();
let expected_options = format!(
"inner {} then {} {}",
expected_input_corpus, expected_generated_inputs, expected_input
"inner {expected_input_corpus} then {expected_generated_inputs} {expected_input}"
);
assert_eq!(

View File

@ -249,7 +249,7 @@ impl<'a> Tester<'a> {
.iter()
.enumerate()
.map(|(idx, frame)| StackEntry {
line: format!("#{} {}", idx, frame),
line: format!("#{idx} {frame}"),
address: Some(frame.addr.0),
function_name: frame.function.as_ref().map(|x| x.name.clone()),
function_offset: frame.function.as_ref().map(|x| x.offset),

View File

@ -138,7 +138,7 @@ impl LibFuzzer {
.iter()
.any(|o| o.starts_with("-max_total_time"))
{
cmd.arg(format!("-max_total_time={}", DEFAULT_MAX_TOTAL_SECONDS));
cmd.arg(format!("-max_total_time={DEFAULT_MAX_TOTAL_SECONDS}"));
}
Ok(cmd)
@ -171,7 +171,7 @@ impl LibFuzzer {
seen_inputs = true;
}
} else {
println!("input dir doesn't exist: {:?}", input_dir);
println!("input dir doesn't exist: {input_dir:?}");
}
}
}

View File

@ -126,7 +126,7 @@ pub async fn run_cmd<S: ::std::hash::BuildHasher>(
let runner = tokio::task::spawn_blocking(move || {
let child = cmd
.spawn()
.with_context(|| format!("process failed to start: {}", program_name))?;
.with_context(|| format!("process failed to start: {program_name}"))?;
child
.controlled_with_output()

View File

@ -130,7 +130,7 @@ mod tests {
assert_eq!(vars["ASAN_SYMBOLIZER_PATH"], SYMBOLIZER_PATH);
let tsan_options = format!("external_symbolizer_path={}", SYMBOLIZER_PATH);
let tsan_options = format!("external_symbolizer_path={SYMBOLIZER_PATH}");
assert_eq!(vars["TSAN_OPTIONS"], tsan_options);
assert_eq!(vars.len(), 2);

View File

@ -60,7 +60,7 @@ impl SyncedDir {
})?
};
BlobContainerUrl::new(url.clone())
.with_context(|| format!("unable to create BlobContainerUrl: {}", url))?
.with_context(|| format!("unable to create BlobContainerUrl: {url}"))?
}
};
Ok(url)
@ -258,8 +258,7 @@ impl SyncedDir {
let destination = path.join(file_name);
if let Err(err) = fs::copy(&item, &destination).await {
let error_message = format!(
"Couldn't upload file. path:{:?} dir:{:?} err:{:?}",
item, destination, err
"Couldn't upload file. path:{item:?} dir:{destination:?} err:{err:?}"
);
if !item.exists() {

View File

@ -362,7 +362,7 @@ mod se {
S: Serializer,
T: std::fmt::LowerHex,
{
s.serialize_str(&format!("0x{:x}", t))
s.serialize_str(&format!("0x{t:x}"))
}
}

View File

@ -36,13 +36,13 @@ impl BlobUploader {
let url = {
let url_path = self.url.path();
let blob_path = format!("{}/{}", url_path, file_name);
let blob_path = format!("{url_path}/{file_name}");
let mut url = self.url.clone();
url.set_path(&blob_path);
url
};
let content_length = format!("{}", file_len);
let content_length = format!("{file_len}");
let resp = send_retry_reqwest(
|| {

View File

@ -2,7 +2,7 @@
name = "reqwest-retry"
version = "0.1.0"
authors = ["fuzzing@microsoft.com"]
edition = "2018"
edition = "2021"
license = "MIT"
[dependencies]
@ -11,7 +11,11 @@ async-trait = "0.1"
backoff = { version = "0.4", features = ["tokio"] }
log = "0.4"
onefuzz-telemetry = { path = "../onefuzz-telemetry" }
reqwest = { version = "0.11", features = ["json", "stream", "native-tls-vendored"], default-features=false }
reqwest = { version = "0.11", features = [
"json",
"stream",
"native-tls-vendored",
], default-features = false }
thiserror = "1.0"
[dev-dependencies]

View File

@ -273,7 +273,7 @@ mod test {
anyhow::bail!("response should have failed: {:?}", result);
}
Err(err) => {
let as_text = format!("{:?}", err);
let as_text = format!("{err:?}");
assert!(as_text.contains("request attempt 4 failed"), "{}", as_text);
}
}
@ -294,7 +294,7 @@ mod test {
anyhow::bail!("response should have failed: {:?}", result);
}
Err(err) => {
let as_text = format!("{:?}", err);
let as_text = format!("{err:?}");
assert!(as_text.contains("request attempt 4 failed"), "{}", as_text);
}
}
@ -310,8 +310,8 @@ mod test {
.send_retry(always_fail, Duration::from_millis(1), 3)
.await;
assert!(resp.is_err(), "{:?}", resp);
let as_text = format!("{:?}", resp);
assert!(resp.is_err(), "{resp:?}");
let as_text = format!("{resp:?}");
assert!(as_text.contains("request attempt 1 failed"), "{}", as_text);
Ok(())
}
@ -336,8 +336,8 @@ mod test {
.send_retry(succeed_400, Duration::from_millis(1), 3)
.await;
assert!(resp.is_err(), "{:?}", resp);
let as_text = format!("{:?}", resp);
assert!(resp.is_err(), "{resp:?}");
let as_text = format!("{resp:?}");
assert!(as_text.contains("request attempt 4 failed"), "{}", as_text);
Ok(())
}

View File

@ -25,9 +25,9 @@ fn main() {
// create all the likely module base names -- do we care about mixed case
// here?
let bare = pdb_path.file_stem().unwrap().to_string_lossy();
let exe = format!("{}.exe", bare);
let dll = format!("{}.dll", bare);
let sys = format!("{}.sys", bare);
let exe = format!("{bare}.exe");
let dll = format!("{bare}.dll");
let sys = format!("{bare}.sys");
// create our new SrcView and insert our only pdb into it
// we don't know what the modoff module will be, so create a mapping from

View File

@ -24,7 +24,7 @@ fn main() {
for modoff in &modoffs {
print!(" +{:04x} ", modoff.offset);
match srcview.modoff(modoff) {
Some(srcloc) => println!("{}", srcloc),
Some(srcloc) => println!("{srcloc}"),
None => println!(),
}
}

View File

@ -112,7 +112,7 @@ fn add_common_extensions(srcview: &mut SrcView, pdb_path: &Path) -> Result<()> {
srcview.insert(&stem, pdb_path)?;
// add common module extensions
for ext in ["sys", "exe", "dll"] {
srcview.insert(&format!("{}.{}", stem, ext), pdb_path)?;
srcview.insert(&format!("{stem}.{ext}"), pdb_path)?;
}
Ok(())
}
@ -132,7 +132,7 @@ fn srcloc(opts: SrcLocOpt) -> Result<()> {
for modoff in &modoffs {
print!(" +{:04x} ", modoff.offset);
match srcview.modoff(modoff) {
Some(srcloc) => println!("{}", srcloc),
Some(srcloc) => println!("{srcloc}"),
None => println!(),
}
}

View File

@ -394,7 +394,7 @@ impl Report {
("branches-valid", "0"),
("branches-covered", "0"),
("branch-rate", "0"),
("timestamp", format!("{}", unixtime).as_str()),
("timestamp", format!("{unixtime}").as_str()),
("complexity", "0"),
("version", "0.1"),
]),

View File

@ -236,7 +236,7 @@ impl SrcView {
for (module, cache) in self.0.iter() {
if let Some(symbols) = cache.path_symbols(path.as_ref()) {
for sym in symbols {
r.insert(format!("{}!{}", module, sym));
r.insert(format!("{module}!{sym}"));
}
}
}

View File

@ -268,7 +268,7 @@ mod tests {
for (data, expected) in test_cases {
let parsed = parse_asan_call_stack(data)
.with_context(|| format!("parsing asan stack failed {}", data))?;
.with_context(|| format!("parsing asan stack failed {data}"))?;
assert_eq!(expected, parsed);
}

View File

@ -9,7 +9,7 @@ fn main() -> Result<()> {
for filename in env::args().skip(1) {
let data = fs::read_to_string(&filename)?;
let asan = CrashLog::parse(data)?;
eprintln!("{}", filename);
eprintln!("{filename}");
println!("{}", serde_json::to_string_pretty(&asan)?);
}

View File

@ -50,10 +50,10 @@ impl StackEntry {
source.push(source_file_name.clone());
}
if let Some(source_file_line) = self.source_file_line {
source.push(format!("{}", source_file_line));
source.push(format!("{source_file_line}"));
}
if let Some(function_offset) = self.function_offset {
source.push(format!("{}", function_offset));
source.push(format!("{function_offset}"));
}
if !source.is_empty() {
@ -323,7 +323,7 @@ mod tests {
let file_name = path.file_name().unwrap().to_str().unwrap();
if skip_files.contains(&file_name) {
eprintln!("skipping file: {}", file_name);
eprintln!("skipping file: {file_name}");
continue;
}

View File

@ -121,7 +121,7 @@ impl AzureQueueClient {
let http = Client::new();
let messages_url = {
let queue_path = queue_url.path();
let messages_path = format!("{}/messages", queue_path);
let messages_path = format!("{queue_path}/messages");
let mut url = queue_url;
url.set_path(&messages_path);
url

View File

@ -81,7 +81,7 @@ impl FileQueueClient {
max_elapsed_time: Some(MAX_ELAPSED_TIME),
..ExponentialBackoff::default()
};
let notify = |err, _| println!("IO error: {}", err);
let notify = |err, _| println!("IO error: {err}");
retry_notify(backoff, send_data, notify).await?;
Ok(())
@ -110,7 +110,7 @@ impl FileQueueClient {
max_elapsed_time: Some(MAX_ELAPSED_TIME),
..ExponentialBackoff::default()
};
let notify = |err, _| println!("IO error: {}", err);
let notify = |err, _| println!("IO error: {err}");
let result = retry_notify(backoff, receive_data, notify).await?;
Ok(result)

View File

@ -41,8 +41,8 @@ fn print_version(include_sha: bool, include_local: bool) -> Result<(), Box<dyn E
}
}
println!("cargo:rustc-env=GIT_VERSION={}", sha);
println!("cargo:rustc-env=ONEFUZZ_VERSION={}", version);
println!("cargo:rustc-env=GIT_VERSION={sha}");
println!("cargo:rustc-env=ONEFUZZ_VERSION={version}");
Ok(())
}