refactor(snix): address upcoming clippy lints

This uses variables inside `format!()` strings directly,
and switches to `std::io::Error::other(e.to_string())`
instead of `std::io::Error::new(std::io::ErrorKind::Other, e.to_string())`.

It also removes an unnecessary `mut` in the test harness that clippy found.

Change-Id: I406c709091e87ec64bfbdbcdc7f460af9b83d3bc
Reviewed-on: https://cl.snix.dev/c/snix/+/30630
Autosubmit: Florian Klink <flokli@flokli.de>
Tested-by: besadii
Reviewed-by: Ryan Lahfa <ryan@lahfa.xyz>
This commit is contained in:
Florian Klink 2025-07-31 13:33:24 +02:00 committed by clbot
parent 632bc09350
commit a9d5c184d5
77 changed files with 224 additions and 325 deletions

View file

@ -11,8 +11,7 @@ pub struct DummyBuildService {}
impl BuildService for DummyBuildService {
#[instrument(skip(self), ret, err)]
async fn do_build(&self, _request: BuildRequest) -> std::io::Result<BuildResult> {
Err(std::io::Error::new(
std::io::ErrorKind::Other,
Err(std::io::Error::other(
"builds are not supported with DummyBuildService",
))
}

View file

@ -24,8 +24,8 @@ where
BS: BlobService + Send + Sync + Clone + 'static,
DS: DirectoryService + Send + Sync + Clone + 'static,
{
let url = Url::parse(uri)
.map_err(|e| std::io::Error::other(format!("unable to parse url: {}", e)))?;
let url =
Url::parse(uri).map_err(|e| std::io::Error::other(format!("unable to parse url: {e}")))?;
Ok(match url.scheme() {
// dummy doesn't care about parameters.

View file

@ -132,10 +132,7 @@ where
warn!(stdout=%stdout, stderr=%stderr, exit_code=%child_output.status, "build failed");
return Err(std::io::Error::new(
std::io::ErrorKind::Other,
"nonzero exit code".to_string(),
));
return Err(std::io::Error::other("nonzero exit code".to_string()));
}
// Ingest build outputs into the castore.
@ -161,7 +158,7 @@ where
.map_err(|e| {
std::io::Error::new(
std::io::ErrorKind::InvalidData,
format!("Unable to ingest output: {}", e),
format!("Unable to ingest output: {e}"),
)
})?,

View file

@ -125,7 +125,7 @@ fn configure_process<'a>(
.args(command_args)
.env(
env.into_iter()
.map(|(k, v)| format!("{}={}", k, v))
.map(|(k, v)| format!("{k}={v}"))
.collect::<Vec<_>>(),
)
.terminal(true)

View file

@ -115,7 +115,7 @@ mod tests {
use std::io::Write;
let mut file = tempfile::NamedTempFile::new().expect("Could not create tempfile");
for line in content.into_iter() {
writeln!(file, "{}", line).expect("");
writeln!(file, "{line}").expect("");
}
file
}

View file

@ -20,7 +20,7 @@ pub async fn from_addr(
uri: &str,
) -> Result<Arc<dyn BlobService>, Box<dyn std::error::Error + Send + Sync>> {
let url = Url::parse(uri)
.map_err(|e| crate::Error::StorageError(format!("unable to parse url: {}", e)))?;
.map_err(|e| crate::Error::StorageError(format!("unable to parse url: {e}")))?;
let blob_service_config = with_registry(&REG, || {
<DeserializeWithRegistry<Box<dyn ServiceBuilder<Output = dyn BlobService>>>>::try_from(url)

View file

@ -64,7 +64,7 @@ where
{
Ok(_blob_meta) => Ok(true),
Err(e) if e.code() == Code::NotFound => Ok(false),
Err(e) => Err(io::Error::new(io::ErrorKind::Other, e)),
Err(e) => Err(io::Error::other(e)),
}
}
@ -106,7 +106,7 @@ where
Ok(Some(Box::new(Cursor::new(buf))))
}
Err(e) if e.code() == Code::NotFound => Ok(None),
Err(e) => Err(io::Error::new(io::ErrorKind::Other, e)),
Err(e) => Err(io::Error::other(e)),
};
}
@ -175,7 +175,7 @@ where
match resp {
Err(e) if e.code() == Code::NotFound => Ok(None),
Err(e) => Err(io::Error::new(io::ErrorKind::Other, e)),
Err(e) => Err(io::Error::other(e)),
Ok(resp) => {
let resp = resp.into_inner();
@ -259,15 +259,14 @@ impl<W: tokio::io::AsyncWrite + Send + Sync + Unpin + 'static> BlobWriter for GR
// return the digest from the response, and store it in self.digest for subsequent closes.
let digest_len = resp.digest.len();
let digest: B3Digest = resp.digest.try_into().map_err(|_| {
io::Error::new(
io::ErrorKind::Other,
format!("invalid root digest length {} in response", digest_len),
)
io::Error::other(format!(
"invalid root digest length {digest_len} in response"
))
})?;
self.digest = Some(digest.clone());
Ok(digest)
}
Err(e) => Err(io::Error::new(io::ErrorKind::Other, e.to_string())),
Err(e) => Err(io::Error::other(e.to_string())),
}
}
}

View file

@ -98,7 +98,7 @@ async fn put_has_get(blob_service: impl BlobService) {
"blob service should now have the blob"
);
let mut r = blob_service
let r = blob_service
.open_read(blob_digest)
.await
.expect("open_read must succeed")

View file

@ -155,7 +155,7 @@ impl<'r, 'de: 'r, T: 'static> SeedFactory<'de, TagString<'de>> for RegistryWithF
.0
.iter()
.find(|(k, _)| *k == &(TypeId::of::<T>(), tag.as_ref()))
.ok_or_else(|| serde::de::Error::custom(format!("Unknown type: {}", tag)))?
.ok_or_else(|| serde::de::Error::custom(format!("Unknown type: {tag}")))?
.1;
let entry: &RegistryEntry<T> = <dyn Any>::downcast_ref(&**seed).unwrap();

View file

@ -202,7 +202,7 @@ impl DirectoryService for BigtableDirectoryService {
let mut response = client
.read_rows(request)
.await
.map_err(|e| Error::StorageError(format!("unable to read rows: {}", e)))?;
.map_err(|e| Error::StorageError(format!("unable to read rows: {e}")))?;
if response.len() != 1 {
if response.len() > 1 {
@ -244,17 +244,14 @@ impl DirectoryService for BigtableDirectoryService {
// For the data in that cell, ensure the digest matches what's requested, before parsing.
let got_digest = B3Digest::from(blake3::hash(&row_cell.value).as_bytes());
if got_digest != *digest {
return Err(Error::StorageError(format!(
"invalid digest: {}",
got_digest
)));
return Err(Error::StorageError(format!("invalid digest: {got_digest}")));
}
// Try to parse the value into a Directory message.
let directory = proto::Directory::decode(Bytes::from(row_cell.value))
.map_err(|e| Error::StorageError(format!("unable to decode directory proto: {}", e)))?
.map_err(|e| Error::StorageError(format!("unable to decode directory proto: {e}")))?
.try_into()
.map_err(|e| Error::StorageError(format!("invalid Directory message: {}", e)))?;
.map_err(|e| Error::StorageError(format!("invalid Directory message: {e}")))?;
Ok(Some(directory))
}
@ -301,7 +298,7 @@ impl DirectoryService for BigtableDirectoryService {
],
})
.await
.map_err(|e| Error::StorageError(format!("unable to mutate rows: {}", e)))?;
.map_err(|e| Error::StorageError(format!("unable to mutate rows: {e}")))?;
if resp.predicate_matched {
trace!("already existed")
@ -376,7 +373,7 @@ impl TryFrom<url::Url> for BigtableParameters {
.append_pair("instance_name", &instance_name);
let params: BigtableParameters = serde_qs::from_str(url.query().unwrap_or_default())
.map_err(|e| Error::InvalidRequest(format!("failed to parse parameters: {}", e)))?;
.map_err(|e| Error::InvalidRequest(format!("failed to parse parameters: {e}")))?;
Ok(params)
}

View file

@ -28,7 +28,7 @@ pub async fn from_addr(
) -> Result<Arc<dyn DirectoryService>, Box<dyn std::error::Error + Send + Sync>> {
#[allow(unused_mut)]
let mut url = Url::parse(uri)
.map_err(|e| crate::Error::StorageError(format!("unable to parse url: {}", e)))?;
.map_err(|e| crate::Error::StorageError(format!("unable to parse url: {e}")))?;
let directory_service_config = with_registry(&REG, || {
<DeserializeWithRegistry<Box<dyn ServiceBuilder<Output = dyn DirectoryService>>>>::try_from(

View file

@ -71,8 +71,7 @@ where
let actual_digest = directory.digest();
if actual_digest != digest {
Err(crate::Error::StorageError(format!(
"requested directory with digest {}, but got {}",
digest, actual_digest
"requested directory with digest {digest}, but got {actual_digest}"
)))
} else {
Ok(Some(directory.try_into().map_err(|_| {
@ -141,8 +140,7 @@ where
// it if it's in received_directory_digests (as that
// means it once was in expected_directory_digests)
Err(crate::Error::StorageError(format!(
"received unexpected directory {}",
directory_digest
"received unexpected directory {directory_digest}"
)))?;
}
received_directory_digests.insert(directory_digest);
@ -177,8 +175,7 @@ where
// If this is not empty, then the closure is incomplete
if diff_len != 0 {
Err(crate::Error::StorageError(format!(
"still expected {} directories, but got premature end of stream",
diff_len
"still expected {diff_len} directories, but got premature end of stream"
)))?
} else {
return

View file

@ -34,13 +34,12 @@ impl DirectoryService for MemoryDirectoryService {
let actual_digest = directory.digest();
if actual_digest != *digest {
return Err(Error::StorageError(format!(
"requested directory with digest {}, but got {}",
digest, actual_digest
"requested directory with digest {digest}, but got {actual_digest}"
)));
}
Ok(Some(directory.clone().try_into().map_err(|e| {
crate::Error::StorageError(format!("corrupted directory: {}", e))
crate::Error::StorageError(format!("corrupted directory: {e}"))
})?))
}
}

View file

@ -152,8 +152,7 @@ impl DirectoryService for ObjectStoreDirectoryService {
// Ensure to only decode the directory objects whose digests we trust
if !order_validator.digest_allowed(&digest) {
return Err(crate::Error::StorageError(format!(
"received unexpected directory {}",
digest
"received unexpected directory {digest}"
)));
}

View file

@ -167,7 +167,7 @@ async fn put_get_foo(directory_service: impl DirectoryService) {
],
];
if !valid_closures.contains(&retrieved_closure) {
panic!("invalid closure returned: {:?}", retrieved_closure);
panic!("invalid closure returned: {retrieved_closure:?}");
}
}

View file

@ -26,7 +26,7 @@ where
// If we didn't get the directory node that's linked, that's a store inconsistency, bail out!
warn!(directory.digest = %digest, "directory does not exist");
Error::StorageError(format!("directory {} does not exist", digest))
Error::StorageError(format!("directory {digest} does not exist"))
})?;
// look for the component in the [Directory].

View file

@ -33,8 +33,7 @@ pub fn traverse_directory<'a, DS: DirectoryService + 'static>(
let current_directory = match directory_service.get(&current_directory_digest).await.map_err(|e| {
warn!("failed to look up directory");
Error::StorageError(format!(
"unable to look up directory {}: {}",
current_directory_digest, e
"unable to look up directory {current_directory_digest}: {e}"
))
})? {
// the root node of the requested closure was not found, return an empty list
@ -43,8 +42,7 @@ pub fn traverse_directory<'a, DS: DirectoryService + 'static>(
None => {
warn!("directory {} does not exist", current_directory_digest);
Err(Error::StorageError(format!(
"directory {} does not exist",
current_directory_digest
"directory {current_directory_digest} does not exist"
)))?;
break;
}

View file

@ -78,7 +78,7 @@ impl From<Error> for Status {
fn from(value: Error) -> Self {
match value {
Error::InvalidRequest(msg) => Status::invalid_argument(msg),
Error::StorageError(msg) => Status::data_loss(format!("storage error: {}", msg)),
Error::StorageError(msg) => Status::data_loss(format!("storage error: {msg}")),
}
}
}
@ -140,7 +140,7 @@ impl From<Error> for std::io::Error {
fn from(value: Error) -> Self {
match value {
Error::InvalidRequest(msg) => Self::new(std::io::ErrorKind::InvalidInput, msg),
Error::StorageError(msg) => Self::new(std::io::ErrorKind::Other, msg),
Error::StorageError(msg) => Self::other(msg),
}
}
}

View file

@ -76,13 +76,13 @@ impl FuseDaemon {
let server = Arc::new(fuse_backend_rs::api::server::Server::new(Arc::new(fs)));
let mut session = FuseSession::new(mountpoint.as_ref(), "snix-castore", "", true)
.map_err(|e| io::Error::new(io::ErrorKind::Other, e.to_string()))?;
.map_err(|e| io::Error::other(e.to_string()))?;
#[cfg(target_os = "linux")]
session.set_allow_other(allow_other);
session
.mount()
.map_err(|e| io::Error::new(io::ErrorKind::Other, e.to_string()))?;
.map_err(|e| io::Error::other(e.to_string()))?;
// construct a thread pool
let threads = threadpool::Builder::new()
@ -99,7 +99,7 @@ impl FuseDaemon {
server: server.clone(),
channel: session
.new_channel()
.map_err(|e| io::Error::new(io::ErrorKind::Other, e.to_string()))?,
.map_err(|e| io::Error::other(e.to_string()))?,
};
// Start the FuseServer in each thread, and enter the tokio runtime context,
@ -131,7 +131,7 @@ impl FuseDaemon {
self.session
.lock()
.umount()
.map_err(|e| io::Error::new(io::ErrorKind::Other, e.to_string()))?;
.map_err(|e| io::Error::other(e.to_string()))?;
self.wait();
Ok(())

View file

@ -54,7 +54,7 @@ impl error::Error for Error {}
impl convert::From<Error> for io::Error {
fn from(e: Error) -> Self {
io::Error::new(io::ErrorKind::Other, e)
io::Error::other(e)
}
}
@ -195,7 +195,7 @@ where
queue
.get_queue_mut()
.enable_notification(self.guest_mem.memory().deref())
.map_err(|e| io::Error::new(io::ErrorKind::Other, e.to_string()))?;
.map_err(|e| io::Error::other(e.to_string()))?;
if !self.process_queue(&mut queue)? {
break;
}

View file

@ -143,7 +143,7 @@ where
// convert to castore PathBuf
let path = crate::path::PathBuf::from_host_path(fs_path, false)
.unwrap_or_else(|e| panic!("Snix bug: walkdir direntry cannot be parsed: {}", e));
.unwrap_or_else(|e| panic!("Snix bug: walkdir direntry cannot be parsed: {e}"));
if file_type.is_dir() {
Ok(IngestionEntry::Dir { path })
@ -166,7 +166,7 @@ where
let digest = upload_blob(blob_service, entry.path().to_path_buf(), reference_scanner)
.instrument({
let span = info_span!("upload_blob", "indicatif.pb_show" = tracing::field::Empty);
span.pb_set_message(&format!("Uploading blob for {:?}", fs_path));
span.pb_set_message(&format!("Uploading blob for {fs_path:?}"));
span.pb_set_style(&snix_tracing::PB_TRANSFER_STYLE);
span

View file

@ -98,7 +98,7 @@ where
Err(e) => {
return Err(IngestionError::UploadDirectoryError(
path,
crate::Error::StorageError(format!("invalid symlink target: {}", e)),
crate::Error::StorageError(format!("invalid symlink target: {e}")),
));
}
}

View file

@ -53,7 +53,7 @@ where
tonic::Status::new(tonic::Code::Internal, e.to_string())
})?
.ok_or_else(|| {
Status::not_found(format!("directory {} not found", digest))
Status::not_found(format!("directory {digest} not found"))
})?;
Box::pin(once(Ok(directory.into())))

View file

@ -155,7 +155,7 @@ impl<'a> Repl<'a> {
Err(ReadlineError::Interrupted) | Err(ReadlineError::Eof) => break,
Err(err) => {
writeln!(stderr, "error: {}", err).unwrap();
writeln!(stderr, "error: {err}").unwrap();
break;
}
}

View file

@ -542,7 +542,7 @@ mod pure_builtins {
let len = length.as_int()?;
let mut out = Vec::with_capacity(
len.try_into()
.map_err(|_| ErrorKind::Abort(format!("can not create list of size {}", len)))?,
.map_err(|_| ErrorKind::Abort(format!("can not create list of size {len}")))?,
);
// the best span we can get…
@ -655,17 +655,17 @@ mod pure_builtins {
for ctx_element in group {
match ctx_element {
NixContextElement::Plain(spath) => {
debug_assert!(spath == key, "Unexpected group containing mixed keys, expected: {:?}, encountered {:?}", key, spath);
debug_assert!(spath == key, "Unexpected group containing mixed keys, expected: {key:?}, encountered {spath:?}");
is_path = true;
}
NixContextElement::Single { name, derivation } => {
debug_assert!(derivation == key, "Unexpected group containing mixed keys, expected: {:?}, encountered {:?}", key, derivation);
debug_assert!(derivation == key, "Unexpected group containing mixed keys, expected: {key:?}, encountered {derivation:?}");
outputs.push(name.clone().into());
}
NixContextElement::Derivation(drv_path) => {
debug_assert!(drv_path == key, "Unexpected group containing mixed keys, expected: {:?}, encountered {:?}", key, drv_path);
debug_assert!(drv_path == key, "Unexpected group containing mixed keys, expected: {key:?}, encountered {drv_path:?}");
all_outputs = true;
}
}
@ -992,7 +992,7 @@ mod pure_builtins {
}
let re = re.to_str()?;
let re = re.to_str()?;
let re: Regex = cached_regex(&format!("^{}$", re))
let re: Regex = cached_regex(&format!("^{re}$"))
.map_err(|_| ErrorKind::InvalidRegex(re.to_string()))?;
match re.captures(s.to_str()?) {

View file

@ -298,15 +298,15 @@ mod tests {
fn xml_escape() {
match XmlEmitter::<Writer<Vec<u8>>>::escape_attr_value("ab<>c&de") {
Cow::Owned(s) => assert_eq!(s, "ab&lt;&gt;c&amp;de".to_string(), "escape stuff"),
Cow::Borrowed(s) => panic!("s should be owned {}", s),
Cow::Borrowed(s) => panic!("s should be owned {s}"),
}
match XmlEmitter::<Writer<Vec<u8>>>::escape_attr_value("") {
Cow::Borrowed(s) => assert_eq!(s, "", "empty escape is borrowed"),
Cow::Owned(s) => panic!("s should be borrowed {}", s),
Cow::Owned(s) => panic!("s should be borrowed {s}"),
}
match XmlEmitter::<Writer<Vec<u8>>>::escape_attr_value("hi!ŷbla") {
Cow::Borrowed(s) => assert_eq!(s, "hi!ŷbla", "no escape is borrowed"),
Cow::Owned(s) => panic!("s should be borrowed {}", s),
Cow::Owned(s) => panic!("s should be borrowed {s}"),
}
match XmlEmitter::<Writer<Vec<u8>>>::escape_attr_value("hi!<ŷ>bla") {
Cow::Owned(s) => assert_eq!(
@ -314,7 +314,7 @@ mod tests {
"hi!&lt;ŷ&gt;bla".to_string(),
"multi-byte chars are correctly used"
),
Cow::Borrowed(s) => panic!("s should be owned {}", s),
Cow::Borrowed(s) => panic!("s should be owned {s}"),
}
}
}

View file

@ -172,40 +172,40 @@ impl Chunk {
if idx.0 > 0 && source.get_line(self.get_span(idx - 1)) == line {
write!(writer, " |\t")?;
} else {
write!(writer, "{:4}\t", line)?;
write!(writer, "{line:4}\t")?;
}
let _fmt_constant = |idx: ConstantIdx| match &self.constants[idx.0] {
Value::Thunk(t) => t.debug_repr(),
Value::Closure(c) => format!("closure({:p})", c.lambda),
Value::Blueprint(b) => format!("blueprint({:p})", b),
val => format!("{}", val),
Value::Blueprint(b) => format!("blueprint({b:p})"),
val => format!("{val}"),
};
let op: Op = self.code[idx.0].into();
match op.arg_type() {
OpArg::None => {
writeln!(writer, "Op{:?}", op)?;
writeln!(writer, "Op{op:?}")?;
Ok(1)
}
OpArg::Fixed => {
let arg = self.read_u16(idx.0 + 1);
writeln!(writer, "Op{:?}({})", op, arg)?;
writeln!(writer, "Op{op:?}({arg})")?;
Ok(3)
}
OpArg::Uvarint => {
let (arg, size) = self.read_uvarint(idx.0 + 1);
writeln!(writer, "Op{:?}({})", op, arg)?;
writeln!(writer, "Op{op:?}({arg})")?;
Ok(1 + size)
}
_ => match op {
Op::CoerceToString => {
let kind: CoercionKind = self.code[idx.0 + 1].into();
writeln!(writer, "Op{:?}({:?})", op, kind)?;
writeln!(writer, "Op{op:?}({kind:?})")?;
Ok(2)
}
@ -221,11 +221,11 @@ impl Chunk {
let captures_with = packed_count & 0b1 == 1;
let count = packed_count >> 1;
write!(writer, "Op{:?}(BP @ {}, ", op, bp_idx)?;
write!(writer, "Op{op:?}(BP @ {bp_idx}, ")?;
if captures_with {
write!(writer, "captures with, ")?;
}
writeln!(writer, "{} upvalues)", count)?;
writeln!(writer, "{count} upvalues)")?;
for _ in 0..count {
let (_, size) = self.read_uvarint(cidx);
@ -234,7 +234,7 @@ impl Chunk {
Ok(cidx - idx.0)
}
_ => panic!("Snix bug: don't know how to format argument for Op{:?}", op),
_ => panic!("Snix bug: don't know how to format argument for Op{op:?}"),
},
}
}

View file

@ -196,8 +196,7 @@ impl<'source, 'observer> Compiler<'source, 'observer> {
let current_dir = std::env::current_dir().map_err(|e| {
Error::new(
ErrorKind::RelativePathResolution(format!(
"could not determine current directory: {}",
e
"could not determine current directory: {e}"
)),
file.span,
source.clone(),
@ -1527,15 +1526,15 @@ fn compile_src_builtin(
let parsed = rnix::ast::Root::parse(code);
if !parsed.errors().is_empty() {
let mut out = format!("BUG: code for source-builtin '{}' had parser errors", name);
let mut out = format!("BUG: code for source-builtin '{name}' had parser errors");
for error in parsed.errors() {
writeln!(out, "{}", error).unwrap();
writeln!(out, "{error}").unwrap();
}
panic!("{}", out);
}
let file = source.add_file(format!("<src-builtins/{}.nix>", name), code.to_string());
let file = source.add_file(format!("<src-builtins/{name}.nix>"), code.to_string());
let weak = weak.clone();
Value::Thunk(Thunk::new_suspended_native(Box::new(move || {
@ -1555,7 +1554,7 @@ fn compile_src_builtin(
if !result.errors.is_empty() {
return Err(ErrorKind::ImportCompilerError {
path: format!("src-builtins/{}.nix", name).into(),
path: format!("src-builtins/{name}.nix").into(),
errors: result.errors,
});
}

View file

@ -253,7 +253,7 @@ to a missing value in the attribute set(s) included via `with`."#
let mut disp = format!("Snix bug: {}", .msg);
if let Some(metadata) = .metadata {
disp.push_str(&format!("; metadata: {:?}", metadata));
disp.push_str(&format!("; metadata: {metadata:?}"));
}
disp
@ -588,10 +588,7 @@ fn spans_for_parse_errors(file: &File, errors: &[rnix::parser::ParseError]) -> V
rnix::parser::ParseError::DuplicatedArgs(range, name) => (
range.span_for(file),
format!(
"the function argument pattern '{}' was bound more than once",
name
),
format!("the function argument pattern '{name}' was bound more than once"),
),
rnix::parser::ParseError::RecursionLimitExceeded => (
@ -843,7 +840,7 @@ impl Error {
for ctx in &self.contexts {
spans.push(SpanLabel {
label: Some(format!("while {}", ctx)),
label: Some(format!("while {ctx}")),
span: self.span,
style: SpanStyle::Secondary,
});

View file

@ -44,7 +44,7 @@ impl std::fmt::Display for FileType {
FileType::Unknown => "unknown",
};
write!(f, "{}", type_as_str)
write!(f, "{type_as_str}")
}
}

View file

@ -182,7 +182,7 @@ impl<W: Write> TracingObserver<W> {
}
// For other value types, defer to the standard value printer.
_ => write!(&mut self.writer, "{} ", val),
_ => write!(&mut self.writer, "{val} "),
};
}
@ -222,7 +222,7 @@ impl<W: Write> RuntimeObserver for TracingObserver<W> {
};
if let Some(name) = &lambda.name {
let _ = write!(&mut self.writer, "'{}' ", name);
let _ = write!(&mut self.writer, "'{name}' ");
}
let _ = writeln!(
@ -235,13 +235,13 @@ impl<W: Write> RuntimeObserver for TracingObserver<W> {
/// Called when the runtime exits a call frame.
fn observe_exit_call_frame(&mut self, frame_at: usize, stack: &[Value]) {
self.maybe_write_time();
let _ = write!(&mut self.writer, "=== exiting frame {} ===\t ", frame_at);
let _ = write!(&mut self.writer, "=== exiting frame {frame_at} ===\t ");
self.write_stack(stack);
}
fn observe_suspend_call_frame(&mut self, frame_at: usize, stack: &[Value]) {
self.maybe_write_time();
let _ = write!(&mut self.writer, "=== suspending frame {} ===\t", frame_at);
let _ = write!(&mut self.writer, "=== suspending frame {frame_at} ===\t");
self.write_stack(stack);
}
@ -250,8 +250,7 @@ impl<W: Write> RuntimeObserver for TracingObserver<W> {
self.maybe_write_time();
let _ = write!(
&mut self.writer,
"=== entering generator frame '{}' [{}] ===\t",
name, frame_at,
"=== entering generator frame '{name}' [{frame_at}] ===\t",
);
self.write_stack(stack);
@ -261,8 +260,7 @@ impl<W: Write> RuntimeObserver for TracingObserver<W> {
self.maybe_write_time();
let _ = write!(
&mut self.writer,
"=== exiting generator '{}' [{}] ===\t",
name, frame_at
"=== exiting generator '{name}' [{frame_at}] ===\t"
);
self.write_stack(stack);
@ -272,8 +270,7 @@ impl<W: Write> RuntimeObserver for TracingObserver<W> {
self.maybe_write_time();
let _ = write!(
&mut self.writer,
"=== suspending generator '{}' [{}] ===\t",
name, frame_at
"=== suspending generator '{name}' [{frame_at}] ===\t"
);
self.write_stack(stack);
@ -283,19 +280,18 @@ impl<W: Write> RuntimeObserver for TracingObserver<W> {
self.maybe_write_time();
let _ = writeln!(
&mut self.writer,
"=== generator '{}' requested {} ===",
name, msg
"=== generator '{name}' requested {msg} ==="
);
}
fn observe_enter_builtin(&mut self, name: &'static str) {
self.maybe_write_time();
let _ = writeln!(&mut self.writer, "=== entering builtin {} ===", name);
let _ = writeln!(&mut self.writer, "=== entering builtin {name} ===");
}
fn observe_exit_builtin(&mut self, name: &'static str, stack: &[Value]) {
self.maybe_write_time();
let _ = write!(&mut self.writer, "=== exiting builtin {} ===\t", name);
let _ = write!(&mut self.writer, "=== exiting builtin {name} ===\t");
self.write_stack(stack);
}

View file

@ -50,10 +50,7 @@ fn test_kv_attrs() {
if name.to_str().unwrap() == meaning_val.to_str().unwrap()
|| value.to_str().unwrap() == forty_two_val.to_str().unwrap() => {}
_ => panic!(
"K/V attribute set should use optimised representation, but got {:?}",
kv_attrs
),
_ => panic!("K/V attribute set should use optimised representation, but got {kv_attrs:?}"),
}
}

View file

@ -410,7 +410,7 @@ impl Value {
(Value::Float(f), CoercionKind { strong: true, .. }) => {
// contrary to normal Display, coercing a float to a string will
// result in unconditional 6 decimal places
Ok(format!("{:.6}", f).into())
Ok(format!("{f:.6}").into())
}
// Lists are coerced by coercing their elements and interspersing spaces
@ -842,18 +842,18 @@ impl Value {
pub fn explain(&self) -> String {
match self {
Value::Null => "the 'null' value".into(),
Value::Bool(b) => format!("the boolean value '{}'", b),
Value::Integer(i) => format!("the integer '{}'", i),
Value::Float(f) => format!("the float '{}'", f),
Value::String(s) if s.has_context() => format!("the contextful string '{}'", s),
Value::String(s) => format!("the contextless string '{}'", s),
Value::Bool(b) => format!("the boolean value '{b}'"),
Value::Integer(i) => format!("the integer '{i}'"),
Value::Float(f) => format!("the float '{f}'"),
Value::String(s) if s.has_context() => format!("the contextful string '{s}'"),
Value::String(s) => format!("the contextless string '{s}'"),
Value::Path(p) => format!("the path '{}'", p.to_string_lossy()),
Value::Attrs(attrs) => format!("a {}-item attribute set", attrs.len()),
Value::List(list) => format!("a {}-item list", list.len()),
Value::Closure(f) => {
if let Some(name) = &f.lambda.name {
format!("the user-defined Nix function '{}'", name)
format!("the user-defined Nix function '{name}'")
} else {
"a user-defined Nix function".to_string()
}
@ -977,7 +977,7 @@ impl TotalDisplay for Value {
Value::Null => f.write_str("null"),
Value::Bool(true) => f.write_str("true"),
Value::Bool(false) => f.write_str("false"),
Value::Integer(num) => write!(f, "{}", num),
Value::Integer(num) => write!(f, "{num}"),
Value::String(s) => s.fmt(f),
Value::Path(p) => p.display().fmt(f),
Value::Attrs(attrs) => attrs.total_fmt(f, set),

View file

@ -656,13 +656,13 @@ impl NixString {
if is_valid_nix_identifier(&escaped) && !is_keyword(&escaped) {
escaped
} else {
Cow::Owned(format!("\"{}\"", escaped))
Cow::Owned(format!("\"{escaped}\""))
}
}
// An owned string has escapes, and needs the outer quotes
// for display.
Cow::Owned(s) => Cow::Owned(format!("\"{}\"", s)),
Cow::Owned(s) => Cow::Owned(format!("\"{s}\"")),
}
}

View file

@ -87,7 +87,7 @@ enum ThunkRepr {
impl ThunkRepr {
fn debug_repr(&self) -> String {
match self {
ThunkRepr::Evaluated(v) => format!("thunk(val|{})", v),
ThunkRepr::Evaluated(v) => format!("thunk(val|{v})"),
ThunkRepr::Blackhole { .. } => "thunk(blackhole)".to_string(),
ThunkRepr::Native(_) => "thunk(native)".to_string(),
ThunkRepr::Suspended { lambda, .. } => format!("thunk({:p})", *lambda),

View file

@ -155,7 +155,7 @@ impl Display for VMRequest {
if *import_paths { "" } else { "non_" },
v.type_of()
),
VMRequest::Call(v) => write!(f, "call({})", v),
VMRequest::Call(v) => write!(f, "call({v})"),
VMRequest::EnterLambda { lambda, .. } => {
write!(f, "enter_lambda({:p})", *lambda)
}
@ -208,12 +208,12 @@ impl Display for VMResponse {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
VMResponse::Empty => write!(f, "empty"),
VMResponse::Value(v) => write!(f, "value({})", v),
VMResponse::Value(v) => write!(f, "value({v})"),
VMResponse::Path(p) => write!(f, "path({})", p.to_string_lossy()),
VMResponse::Directory(d) => write!(f, "dir(len = {})", d.len()),
VMResponse::Span(_) => write!(f, "span"),
VMResponse::Reader(_) => write!(f, "reader"),
VMResponse::FileType(t) => write!(f, "file_type({})", t),
VMResponse::FileType(t) => write!(f, "file_type({t})"),
}
}
}
@ -527,10 +527,7 @@ pub type GenCo = Co<VMRequest, VMResponse>;
pub async fn request_stack_push(co: &GenCo, val: Value) {
match co.yield_(VMRequest::StackPush(val)).await {
VMResponse::Empty => {}
msg => panic!(
"Snix bug: VM responded with incorrect generator message: {}",
msg
),
msg => panic!("Snix bug: VM responded with incorrect generator message: {msg}"),
}
}
@ -539,10 +536,7 @@ pub async fn request_stack_push(co: &GenCo, val: Value) {
pub async fn request_stack_pop(co: &GenCo) -> Value {
match co.yield_(VMRequest::StackPop).await {
VMResponse::Value(value) => value,
msg => panic!(
"Snix bug: VM responded with incorrect generator message: {}",
msg
),
msg => panic!("Snix bug: VM responded with incorrect generator message: {msg}"),
}
}
@ -551,10 +545,7 @@ pub async fn request_force(co: &GenCo, val: Value) -> Value {
if let Value::Thunk(_) = val {
match co.yield_(VMRequest::ForceValue(val)).await {
VMResponse::Value(value) => value,
msg => panic!(
"Snix bug: VM responded with incorrect generator message: {}",
msg
),
msg => panic!("Snix bug: VM responded with incorrect generator message: {msg}"),
}
} else {
val
@ -566,10 +557,7 @@ pub(crate) async fn request_try_force(co: &GenCo, val: Value) -> Value {
if let Value::Thunk(_) = val {
match co.yield_(VMRequest::TryForce(val)).await {
VMResponse::Value(value) => value,
msg => panic!(
"Snix bug: VM responded with incorrect generator message: {}",
msg
),
msg => panic!("Snix bug: VM responded with incorrect generator message: {msg}"),
}
} else {
val
@ -582,10 +570,7 @@ pub async fn request_call(co: &GenCo, val: Value) -> Value {
let val = request_force(co, val).await;
match co.yield_(VMRequest::Call(val)).await {
VMResponse::Value(value) => value,
msg => panic!(
"Snix bug: VM responded with incorrect generator message: {}",
msg
),
msg => panic!("Snix bug: VM responded with incorrect generator message: {msg}"),
}
}
@ -624,10 +609,7 @@ pub async fn request_string_coerce(
VMResponse::Value(value) => Ok(value
.to_contextful_str()
.expect("coerce_to_string always returns a string")),
msg => panic!(
"Snix bug: VM responded with incorrect generator message: {}",
msg
),
msg => panic!("Snix bug: VM responded with incorrect generator message: {msg}"),
},
}
}
@ -636,10 +618,7 @@ pub async fn request_string_coerce(
pub async fn request_deep_force(co: &GenCo, val: Value) -> Value {
match co.yield_(VMRequest::DeepForceValue(val)).await {
VMResponse::Value(value) => value,
msg => panic!(
"Snix bug: VM responded with incorrect generator message: {}",
msg
),
msg => panic!("Snix bug: VM responded with incorrect generator message: {msg}"),
}
}
@ -656,10 +635,7 @@ pub(crate) async fn check_equality(
{
VMResponse::Value(Value::Bool(b)) => Ok(Ok(b)),
VMResponse::Value(Value::Catchable(cek)) => Ok(Err(*cek)),
msg => panic!(
"Snix bug: VM responded with incorrect generator message: {}",
msg
),
msg => panic!("Snix bug: VM responded with incorrect generator message: {msg}"),
}
}
@ -667,10 +643,7 @@ pub(crate) async fn check_equality(
pub(crate) async fn emit_warning(co: &GenCo, warning: EvalWarning) {
match co.yield_(VMRequest::EmitWarning(warning)).await {
VMResponse::Empty => {}
msg => panic!(
"Snix bug: VM responded with incorrect generator message: {}",
msg
),
msg => panic!("Snix bug: VM responded with incorrect generator message: {msg}"),
}
}
@ -678,10 +651,7 @@ pub(crate) async fn emit_warning(co: &GenCo, warning: EvalWarning) {
pub async fn emit_warning_kind(co: &GenCo, kind: WarningKind) {
match co.yield_(VMRequest::EmitWarningKind(kind)).await {
VMResponse::Empty => {}
msg => panic!(
"Snix bug: VM responded with incorrect generator message: {}",
msg
),
msg => panic!("Snix bug: VM responded with incorrect generator message: {msg}"),
}
}
@ -700,10 +670,7 @@ pub(crate) async fn request_enter_lambda(
match co.yield_(msg).await {
VMResponse::Value(value) => value,
msg => panic!(
"Snix bug: VM responded with incorrect generator message: {}",
msg
),
msg => panic!("Snix bug: VM responded with incorrect generator message: {msg}"),
}
}
@ -712,10 +679,7 @@ pub(crate) async fn request_import_cache_lookup(co: &GenCo, path: PathBuf) -> Op
match co.yield_(VMRequest::ImportCacheLookup(path)).await {
VMResponse::Value(value) => Some(value),
VMResponse::Empty => None,
msg => panic!(
"Snix bug: VM responded with incorrect generator message: {}",
msg
),
msg => panic!("Snix bug: VM responded with incorrect generator message: {msg}"),
}
}
@ -723,10 +687,7 @@ pub(crate) async fn request_import_cache_lookup(co: &GenCo, path: PathBuf) -> Op
pub(crate) async fn request_import_cache_put(co: &GenCo, path: PathBuf, value: Value) {
match co.yield_(VMRequest::ImportCachePut(path, value)).await {
VMResponse::Empty => {}
msg => panic!(
"Snix bug: VM responded with incorrect generator message: {}",
msg
),
msg => panic!("Snix bug: VM responded with incorrect generator message: {msg}"),
}
}
@ -734,10 +695,7 @@ pub(crate) async fn request_import_cache_put(co: &GenCo, path: PathBuf, value: V
pub(crate) async fn request_path_import(co: &GenCo, path: PathBuf) -> PathBuf {
match co.yield_(VMRequest::PathImport(path)).await {
VMResponse::Path(path) => path,
msg => panic!(
"Snix bug: VM responded with incorrect generator message: {}",
msg
),
msg => panic!("Snix bug: VM responded with incorrect generator message: {msg}"),
}
}
@ -745,10 +703,7 @@ pub(crate) async fn request_path_import(co: &GenCo, path: PathBuf) -> PathBuf {
pub async fn request_open_file(co: &GenCo, path: PathBuf) -> Box<dyn std::io::Read> {
match co.yield_(VMRequest::OpenFile(path)).await {
VMResponse::Reader(value) => value,
msg => panic!(
"Snix bug: VM responded with incorrect generator message: {}",
msg
),
msg => panic!("Snix bug: VM responded with incorrect generator message: {msg}"),
}
}
@ -756,10 +711,7 @@ pub async fn request_open_file(co: &GenCo, path: PathBuf) -> Box<dyn std::io::Re
pub(crate) async fn request_path_exists(co: &GenCo, path: PathBuf) -> Value {
match co.yield_(VMRequest::PathExists(path)).await {
VMResponse::Value(value) => value,
msg => panic!(
"Snix bug: VM responded with incorrect generator message: {}",
msg
),
msg => panic!("Snix bug: VM responded with incorrect generator message: {msg}"),
}
}
@ -767,20 +719,14 @@ pub(crate) async fn request_path_exists(co: &GenCo, path: PathBuf) -> Value {
pub(crate) async fn request_read_dir(co: &GenCo, path: PathBuf) -> Vec<(bytes::Bytes, FileType)> {
match co.yield_(VMRequest::ReadDir(path)).await {
VMResponse::Directory(dir) => dir,
msg => panic!(
"Snix bug: VM responded with incorrect generator message: {}",
msg
),
msg => panic!("Snix bug: VM responded with incorrect generator message: {msg}"),
}
}
pub(crate) async fn request_span(co: &GenCo) -> Span {
match co.yield_(VMRequest::Span).await {
VMResponse::Span(span) => span,
msg => panic!(
"Snix bug: VM responded with incorrect generator message: {}",
msg
),
msg => panic!("Snix bug: VM responded with incorrect generator message: {msg}"),
}
}
@ -788,10 +734,7 @@ pub(crate) async fn request_span(co: &GenCo) -> Span {
pub(crate) async fn request_read_file_type(co: &GenCo, path: PathBuf) -> FileType {
match co.yield_(VMRequest::ReadFileType(path)).await {
VMResponse::FileType(file_type) => file_type,
msg => panic!(
"Snix bug: VM responded with incorrect generator message: {}",
msg
),
msg => panic!("Snix bug: VM responded with incorrect generator message: {msg}"),
}
}

View file

@ -1234,10 +1234,7 @@ async fn resolve_with(
async fn fetch_forced_with(co: &GenCo, idx: usize) -> Value {
match co.yield_(VMRequest::WithValue(idx)).await {
VMResponse::Value(value) => value,
msg => panic!(
"Snix bug: VM responded with incorrect generator message: {}",
msg
),
msg => panic!("Snix bug: VM responded with incorrect generator message: {msg}"),
}
}
@ -1245,10 +1242,7 @@ async fn resolve_with(
async fn fetch_captured_with(co: &GenCo, idx: usize) -> Value {
match co.yield_(VMRequest::CapturedWithValue(idx)).await {
VMResponse::Value(value) => value,
msg => panic!(
"Snix bug: VM responded with incorrect generator message: {}",
msg
),
msg => panic!("Snix bug: VM responded with incorrect generator message: {msg}"),
}
}

View file

@ -92,7 +92,7 @@ impl EvalWarning {
}
WarningKind::ShadowedGlobal(name) => {
format!("declared variable '{}' shadows a built-in global!", name)
format!("declared variable '{name}' shadows a built-in global!")
}
WarningKind::DeprecatedLegacyLet => {
@ -100,11 +100,11 @@ impl EvalWarning {
}
WarningKind::InvalidNixPath(ref err) => {
format!("invalid NIX_PATH resulted in a parse error: {}", err)
format!("invalid NIX_PATH resulted in a parse error: {err}")
}
WarningKind::UselessBoolOperation(msg) => {
format!("useless operation on boolean: {}", msg)
format!("useless operation on boolean: {msg}")
}
WarningKind::DeadCode => "this code will never be executed".to_string(),
@ -113,14 +113,13 @@ impl EvalWarning {
WarningKind::EmptyLet => "this `let`-expression contains no bindings".to_string(),
WarningKind::ShadowedOutput(ref out) => format!(
"this derivation's environment shadows the output name {}",
out
),
WarningKind::ShadowedOutput(ref out) => {
format!("this derivation's environment shadows the output name {out}")
}
WarningKind::SRIHashWrongPadding => "SRI hash has wrong padding".to_string(),
WarningKind::NotImplemented(what) => {
format!("feature not yet implemented in snix: {}", what)
format!("feature not yet implemented in snix: {what}")
}
}
}

View file

@ -43,8 +43,7 @@ fn populate_inputs(drv: &mut Derivation, full_context: NixContext, known_paths:
#[cfg(debug_assertions)]
assert!(
_rest.iter().next().is_none(),
"Extra path not empty for {}",
derivation_str
"Extra path not empty for {derivation_str}"
);
match drv.input_derivations.entry(derivation.clone()) {
@ -65,8 +64,7 @@ fn populate_inputs(drv: &mut Derivation, full_context: NixContext, known_paths:
#[cfg(debug_assertions)]
assert!(
_rest.iter().next().is_none(),
"Extra path not empty for {}",
drv_path
"Extra path not empty for {drv_path}"
);
// We need to know all the outputs *names* of that derivation.
@ -160,7 +158,7 @@ fn handle_fixed_output(
// Peek at hash_str once more.
// If it was a SRI hash, but is not using the correct length, this means
// the padding was wrong. Emit a warning in that case.
let sri_prefix = format!("{}-", algo);
let sri_prefix = format!("{algo}-");
if let Some(rest) = hash_str.strip_prefix(&sri_prefix) {
if data_encoding::BASE64.encode_len(algo.digest_length()) != rest.len() {
return Ok(Some(WarningKind::SRIHashWrongPadding));
@ -475,7 +473,7 @@ pub(crate) mod derivation_builtins {
&drv.hash_derivation_modulo(|drv_path| {
*known_paths
.get_hash_derivation_modulo(&drv_path.to_owned())
.unwrap_or_else(|| panic!("{} not found", drv_path))
.unwrap_or_else(|| panic!("{drv_path} not found"))
}),
)
.map_err(DerivationError::InvalidDerivation)?;

View file

@ -99,7 +99,7 @@ async fn filtered_ingest(
.await
.map_err(|e| ErrorKind::IO {
path: Some(path.to_path_buf()),
error: Rc::new(std::io::Error::new(std::io::ErrorKind::Other, e)),
error: Rc::new(std::io::Error::other(e)),
})
})
}

View file

@ -117,7 +117,7 @@ mod tests {
snix_eval::Value::String(s) => {
assert_eq!(*s, "/nix/store/xpcvxsx5sw4rbq666blz6sxqlmsqphmr-foo",);
}
_ => panic!("unexpected value type: {:?}", value),
_ => panic!("unexpected value type: {value:?}"),
}
}
@ -204,7 +204,7 @@ mod tests {
snix_eval::Value::String(s) => {
assert_eq!(*s, expected_path);
}
_ => panic!("unexpected value type: {:?}", value),
_ => panic!("unexpected value type: {value:?}"),
}
}
@ -236,7 +236,7 @@ mod tests {
snix_eval::Value::Bool(v) => {
assert!(v);
}
_ => panic!("unexpected value type: {:?}", value),
_ => panic!("unexpected value type: {value:?}"),
}
}
@ -254,7 +254,7 @@ mod tests {
snix_eval::Value::Bool(v) => {
assert!(v);
}
_ => panic!("unexpected value type: {:?}", value),
_ => panic!("unexpected value type: {value:?}"),
}
}
@ -276,7 +276,7 @@ mod tests {
snix_eval::Value::Bool(v) => {
assert!(v);
}
_ => panic!("unexpected value type: {:?}", value),
_ => panic!("unexpected value type: {value:?}"),
}
}
@ -297,7 +297,7 @@ mod tests {
snix_eval::Value::Bool(v) => {
assert!(v);
}
_ => panic!("unexpected value type: {:?}", value),
_ => panic!("unexpected value type: {value:?}"),
}
}
@ -330,7 +330,7 @@ mod tests {
assert_eq!(*s, expected_drvpath);
}
_ => panic!("unexpected value type: {:?}", value),
_ => panic!("unexpected value type: {value:?}"),
};
}
@ -362,7 +362,7 @@ mod tests {
snix_eval::Value::String(s) => {
assert_eq!(*s, expected_path);
}
_ => panic!("unexpected value type: {:?}", value),
_ => panic!("unexpected value type: {value:?}"),
}
assert!(
@ -536,7 +536,7 @@ mod tests {
snix_eval::Value::String(s) => {
assert_eq!(expected_outpath, s.as_bstr());
}
_ => panic!("unexpected value type: {:?}", value),
_ => panic!("unexpected value type: {value:?}"),
}
assert!(eval_result.errors.is_empty(), "errors should be empty");
@ -580,7 +580,7 @@ mod tests {
s.as_bstr()
);
}
v => panic!("unexpected value type: {:?}", v),
v => panic!("unexpected value type: {v:?}"),
}
} else {
assert!(value.is_none(), "unexpected success on illegal store paths");
@ -626,7 +626,7 @@ mod tests {
s.as_bstr()
);
}
v => panic!("unexpected value type: {:?}", v),
v => panic!("unexpected value type: {v:?}"),
}
} else {
assert!(value.is_none(), "unexpected success on illegal store paths");
@ -714,7 +714,7 @@ mod tests {
snix_eval::Value::String(s) => {
assert_eq!(expected_outpath, s.as_bstr());
}
_ => panic!("unexpected value type: {:?}", value),
_ => panic!("unexpected value type: {value:?}"),
}
assert!(eval_result.errors.is_empty(), "errors should be empty");

View file

@ -116,7 +116,7 @@ impl std::fmt::Debug for Fetch {
NixHash::Sha256(*exp_nar_sha256)
)
} else {
write!(f, "Tarball [url: {}, exp_hash: None]", url)
write!(f, "Tarball [url: {url}, exp_hash: None]")
}
}
Fetch::NAR { url, hash } => {

View file

@ -78,7 +78,7 @@ impl KnownPaths {
// compute the hash derivation modulo
let hash_derivation_modulo = drv.hash_derivation_modulo(|drv_path| {
self.get_hash_derivation_modulo(&drv_path.to_owned())
.unwrap_or_else(|| panic!("{} not found", drv_path))
.unwrap_or_else(|| panic!("{drv_path} not found"))
.to_owned()
});

View file

@ -23,7 +23,7 @@ pub fn configure_nix_path<'co, 'ro, 'env, IO>(
eval_builder.nix_path(
nix_search_path
.as_ref()
.map(|p| format!("nix=/__corepkgs__:{}", p))
.map(|p| format!("nix=/__corepkgs__:{p}"))
.or_else(|| Some("nix=/__corepkgs__".to_string())),
)
}

View file

@ -258,7 +258,7 @@ fn handle_pass_as_file(
/// The filepath is `/build/.attrs-${nixbase32(sha256(key))`.
fn calculate_pass_as_file_env(k: &str) -> (String, String) {
(
format!("{}Path", k),
format!("{k}Path"),
format!(
"/build/.attr-{}",
nixbase32::encode(&Sha256::new_with_prefix(k).finalize())

View file

@ -223,7 +223,7 @@ impl SnixStoreIO {
.as_ref()
.do_build(build_request)
.await
.map_err(|e| std::io::Error::new(io::ErrorKind::Other, e))?;
.map_err(std::io::Error::other)?;
let mut out_path_info: Option<PathInfo> = None;
@ -256,8 +256,7 @@ impl SnixStoreIO {
all_possible_refs
.get(*idx as usize)
.map(|it| (*it).clone())
.ok_or(std::io::Error::new(
std::io::ErrorKind::Other,
.ok_or(std::io::Error::other(
"invalid build response",
))
})
@ -289,7 +288,7 @@ impl SnixStoreIO {
self.path_info_service
.put(path_info.clone())
.await
.map_err(|e| std::io::Error::new(io::ErrorKind::Other, e))?;
.map_err(std::io::Error::other)?;
if store_path == &output_path {
out_path_info = Some(path_info);
@ -309,7 +308,7 @@ impl SnixStoreIO {
Ok(
directoryservice::descend_to(&self.directory_service, path_info.node.clone(), sub_path)
.await
.map_err(|e| std::io::Error::new(io::ErrorKind::Other, e))?
.map_err(std::io::Error::other)?
.map(|node| {
path_info.node = node;
path_info
@ -361,7 +360,7 @@ impl EvalIO for SnixStoreIO {
// This would normally be a io::ErrorKind::IsADirectory (still unstable)
Err(io::Error::new(
io::ErrorKind::Unsupported,
format!("tried to open directory at {:?}", path),
format!("tried to open directory at {path:?}"),
))
}
Node::File { digest, .. } => {
@ -558,7 +557,7 @@ mod tests {
let value = result.value.expect("must be some");
match value {
snix_eval::Value::String(s) => Some(s.to_str_lossy().into_owned()),
_ => panic!("unexpected value type: {:?}", value),
_ => panic!("unexpected value type: {value:?}"),
}
}
@ -625,7 +624,7 @@ mod tests {
snix_eval::Value::String(s) => {
assert_eq!(*s, "/deep/thought");
}
_ => panic!("unexpected value type: {:?}", value),
_ => panic!("unexpected value type: {value:?}"),
}
}
}

View file

@ -83,9 +83,6 @@ async fn four_o_four() -> Result<(), StatusCode> {
async fn nix_cache_info(priority: u64) -> impl IntoResponse {
(
[("Content-Type", nix_http::MIME_TYPE_CACHE_INFO)],
format!(
"StoreDir: /nix/store\nWantMassQuery: 1\nPriority: {}\n",
priority
),
format!("StoreDir: /nix/store\nWantMassQuery: 1\nPriority: {priority}\n"),
)
}

View file

@ -51,7 +51,7 @@ impl Field {
}
} else {
let path = meta.path.to_token_stream().to_string();
return Err(meta.error(format_args!("unknown nix field attribute '{}'", path)));
return Err(meta.error(format_args!("unknown nix field attribute '{path}'")));
}
Ok(())
}) {
@ -86,9 +86,7 @@ impl Variant {
}
} else {
let path = meta.path.to_token_stream().to_string();
return Err(
meta.error(format_args!("unknown nix variant attribute '{}'", path))
);
return Err(meta.error(format_args!("unknown nix variant attribute '{path}'")));
}
Ok(())
}) {
@ -149,9 +147,7 @@ impl Container {
crate_path = parse_lit(ctx, &meta, CRATE)?;
} else {
let path = meta.path.to_token_stream().to_string();
return Err(
meta.error(format_args!("unknown nix variant attribute '{}'", path))
);
return Err(meta.error(format_args!("unknown nix variant attribute '{path}'")));
}
Ok(())
}) {
@ -190,7 +186,7 @@ pub fn get_lit_str(
} else {
ctx.error_spanned(
expr,
format_args!("expected nix attribute {} to be string", attr),
format_args!("expected nix attribute {attr} to be string"),
);
Ok(None)
}

View file

@ -42,6 +42,6 @@ fn main() {
.expect("unable to serialize")
);
}
Err(e) => eprintln!("unable to parse derivation: {:#?}", e),
Err(e) => eprintln!("unable to parse derivation: {e:#?}"),
}
}

View file

@ -133,7 +133,7 @@ impl Derivation {
name: &str,
) -> Result<StorePath<String>, DerivationError> {
// append .drv to the name
let name = format!("{}.drv", name);
let name = format!("{name}.drv");
// collect the list of paths from input_sources and input_derivations
// into a (sorted, guaranteed by BTreeSet) list of references
@ -362,6 +362,6 @@ fn ca_kind_prefix(ca_hash: &CAHash) -> &'static str {
match ca_hash {
CAHash::Flat(_) => "",
CAHash::Nar(_) => "r:",
_ => panic!("invalid ca hash in derivation context: {:?}", ca_hash),
_ => panic!("invalid ca hash in derivation context: {ca_hash:?}"),
}
}

View file

@ -151,7 +151,7 @@ fn from_aterm_bytes_trailer() {
)]
#[case::unicode("unicode", "52a9id8hx688hvlnz4d1n25ml1jdykz0-unicode.drv")]
fn derivation_path(#[case] name: &str, #[case] expected_path: &str) {
let json_bytes = fs::read(format!("{}/ok/{}.json", RESOURCES_PATHS, expected_path))
let json_bytes = fs::read(format!("{RESOURCES_PATHS}/ok/{expected_path}.json"))
.expect("unable to read JSON");
let derivation: Derivation =
serde_json::from_slice(&json_bytes).expect("JSON was not well-formatted");
@ -194,7 +194,7 @@ fn derivation_without_output_paths(derivation: &Derivation) -> Derivation {
fn hash_derivation_modulo_fixed(#[case] drv_path: &str, #[case] expected_digest: [u8; 32]) {
// read in the fixture
let json_bytes =
fs::read(format!("{}/ok/{}.json", RESOURCES_PATHS, drv_path)).expect("unable to read JSON");
fs::read(format!("{RESOURCES_PATHS}/ok/{drv_path}.json")).expect("unable to read JSON");
let drv: Derivation = serde_json::from_slice(&json_bytes).expect("must deserialize");
let actual = drv.hash_derivation_modulo(|_| panic!("must not be called"));
@ -220,7 +220,7 @@ fn hash_derivation_modulo_fixed(#[case] drv_path: &str, #[case] expected_digest:
fn output_paths(#[case] name: &str, #[case] drv_path_str: &str) {
// read in the derivation
let expected_derivation = Derivation::from_aterm_bytes(
&fs::read(format!("{}/ok/{}", RESOURCES_PATHS, drv_path_str)).expect("unable to read .drv"),
&fs::read(format!("{RESOURCES_PATHS}/ok/{drv_path_str}")).expect("unable to read .drv"),
)
.expect("must succeed");
@ -410,7 +410,7 @@ fn output_path_construction() {
"foo",
&foo_drv.hash_derivation_modulo(|drv_path| {
if drv_path.to_string() != "0hm2f1psjpcwg8fijsmr4wwxrx59s092-bar.drv" {
panic!("lookup called with unexpected drv_path: {}", drv_path);
panic!("lookup called with unexpected drv_path: {drv_path}");
}
bar_drv_hash_derivation_modulo
}),

View file

@ -590,7 +590,7 @@ impl FlakeRef {
_ => unreachable!(),
};
let mut url = Url::parse(&format!("{}://{}/{}", scheme, owner, repo)).unwrap();
let mut url = Url::parse(&format!("{scheme}://{owner}/{repo}")).unwrap();
if let Some(h) = host {
url.set_host(Some(h)).unwrap();
}
@ -609,7 +609,7 @@ impl FlakeRef {
url
}
FlakeRef::Indirect { id, r#ref, rev } => {
let mut url = Url::parse(&format!("indirect://{}", id)).unwrap();
let mut url = Url::parse(&format!("indirect://{id}")).unwrap();
append_params(&mut url, &[("ref", r#ref.clone()), ("rev", rev.clone())]);
url
}

View file

@ -290,7 +290,7 @@ async fn must_read_file(name: &'static str, entry: nar::reader::Entry<'_, '_>) {
assert!(!executable);
assert_eq!(reader.read(&mut [0]).await.unwrap(), 0);
}
_ => panic!("unexpected type for {}", name),
_ => panic!("unexpected type for {name}"),
}
}
@ -305,6 +305,6 @@ fn must_be_symlink(
nar::reader::Node::Symlink { target } => {
assert_eq!(exp_target.as_bytes(), &target);
}
_ => panic!("unexpected type for {}", name),
_ => panic!("unexpected type for {name}"),
}
}

View file

@ -258,7 +258,7 @@ fn must_read_file(name: &'static str, entry: nar::reader::Entry<'_, '_>) {
assert!(!executable);
assert_eq!(reader.read(&mut [0]).unwrap(), 0);
}
_ => panic!("unexpected type for {}", name),
_ => panic!("unexpected type for {name}"),
}
}
@ -273,6 +273,6 @@ fn must_be_symlink(
nar::reader::Node::Symlink { target } => {
assert_eq!(exp_target.as_bytes(), &target);
}
_ => panic!("unexpected type for {}", name),
_ => panic!("unexpected type for {name}"),
}
}

View file

@ -357,7 +357,7 @@ mod tests {
}
Err(e) => {
w.write_value(&STDERR_ERROR).await.unwrap();
w.write_value(&NixError::new(format!("{:?}", e)))
w.write_value(&NixError::new(format!("{e:?}")))
.await
.unwrap();
}

View file

@ -118,7 +118,7 @@ impl NixDeserialize for CAHash {
let value: Option<String> = reader.try_read_value().await?;
match value {
Some(value) => Ok(Some(CAHash::from_nix_hex_str(&value).ok_or_else(|| {
R::Error::invalid_data(format!("Invalid cahash {}", value))
R::Error::invalid_data(format!("Invalid cahash {value}"))
})?)),
None => Ok(None),
}
@ -137,7 +137,7 @@ impl NixDeserialize for Option<CAHash> {
Ok(None)
} else {
Ok(Some(Some(CAHash::from_nix_hex_str(&value).ok_or_else(
|| R::Error::invalid_data(format!("Invalid cahash {}", value)),
|| R::Error::invalid_data(format!("Invalid cahash {value}")),
)?)))
}
}

View file

@ -168,7 +168,7 @@ where
if worker_magic_1 != WORKER_MAGIC_1 {
Err(std::io::Error::new(
ErrorKind::InvalidData,
format!("Incorrect worker magic number received: {}", worker_magic_1),
format!("Incorrect worker magic number received: {worker_magic_1}"),
))
} else {
conn.write_u64_le(WORKER_MAGIC_2).await?;
@ -182,7 +182,7 @@ where
if client_version < ProtocolVersion::from_parts(1, 10) {
return Err(Error::new(
ErrorKind::Unsupported,
format!("The nix client version {} is too old", client_version),
format!("The nix client version {client_version} is too old"),
));
}
let picked_version = min(PROTOCOL_VERSION, client_version);
@ -215,7 +215,7 @@ pub async fn read_op<R: AsyncReadExt + Unpin>(r: &mut R) -> std::io::Result<Oper
Operation::try_from(op_number).map_err(|_| {
Error::new(
ErrorKind::InvalidData,
format!("Invalid OP number {}", op_number),
format!("Invalid OP number {op_number}"),
)
})
}

View file

@ -202,7 +202,7 @@ where
/// That is just the string representation, prefixed with the store prefix
/// ([STORE_DIR_WITH_SLASH]),
pub fn to_absolute_path(&self) -> String {
format!("{}{}", STORE_DIR_WITH_SLASH, self)
format!("{STORE_DIR_WITH_SLASH}{self}")
}
}

View file

@ -203,7 +203,7 @@ fn make_references_string<S: AsRef<str>, I: IntoIterator<Item = S>>(
/// The actual placeholder is basically just a SHA256 hash encoded in
/// cppnix format.
pub fn hash_placeholder(name: &str) -> String {
let digest = Sha256::new_with_prefix(format!("nix-output:{}", name)).finalize();
let digest = Sha256::new_with_prefix(format!("nix-output:{name}")).finalize();
format!("/{}", nixbase32::encode(&digest))
}

View file

@ -554,7 +554,7 @@ mod tests {
let payload = &hex!("FF0102030405060708");
let mut mock = Builder::new()
.read(&produce_packet_bytes(payload).await[..offset])
.read_error(std::io::Error::new(std::io::ErrorKind::Other, "foo"))
.read_error(std::io::Error::other("foo"))
.build();
// Either length reading or data reading can fail, depending on which test case we're in.
@ -595,7 +595,7 @@ mod tests {
let payload = &hex!("FF0102030405060708");
let mock = Builder::new()
.read(&produce_packet_bytes(payload).await[..offset])
.read_error(std::io::Error::new(std::io::ErrorKind::Other, "foo"))
.read_error(std::io::Error::other("foo"))
.build();
let mut mock = BufReader::new(mock);
@ -631,7 +631,7 @@ mod tests {
let payload = &hex!("FF0102030405060708");
let mut mock = Builder::new()
.read(&produce_packet_bytes(payload).await)
.read_error(std::io::Error::new(std::io::ErrorKind::Other, "foo"))
.read_error(std::io::Error::other("foo"))
.build();
let mut r = BytesReader::new(&mut mock, ..MAX_LEN).await.unwrap();
@ -648,7 +648,7 @@ mod tests {
let payload = &hex!("FF0102030405060708");
let mock = Builder::new()
.read(&produce_packet_bytes(payload).await)
.read_error(std::io::Error::new(std::io::ErrorKind::Other, "foo"))
.read_error(std::io::Error::other("foo"))
.build();
let mut mock = BufReader::new(mock);

View file

@ -477,7 +477,7 @@ mod tests {
let mut mock = Builder::new()
.write(&1u32.to_le_bytes())
.write_error(std::io::Error::new(std::io::ErrorKind::Other, "🍿"))
.write_error(std::io::Error::other("🍿"))
.build();
let mut w = BytesWriter::new(&mut mock, payload.len() as u64);
@ -492,7 +492,7 @@ mod tests {
let mut mock = Builder::new()
.write(&1u32.to_le_bytes())
.write_error(std::io::Error::new(std::io::ErrorKind::Other, "🍿"))
.write_error(std::io::Error::other("🍿"))
.build();
let mut w = BytesWriter::new(&mut mock, payload.len() as u64);
@ -508,7 +508,7 @@ mod tests {
let mut mock = Builder::new()
.write(&2u64.to_le_bytes())
.write(&hex!("f0"))
.write_error(std::io::Error::new(std::io::ErrorKind::Other, "🍿"))
.write_error(std::io::Error::other("🍿"))
.build();
let mut w = BytesWriter::new(&mut mock, payload.len() as u64);
@ -526,7 +526,7 @@ mod tests {
.write(&1u64.to_le_bytes())
.write(&hex!("f0"))
.write(&hex!("00"))
.write_error(std::io::Error::new(std::io::ErrorKind::Other, "🍿"))
.write_error(std::io::Error::other("🍿"))
.build();
let mut w = BytesWriter::new(&mut mock, payload.len() as u64);

View file

@ -24,7 +24,7 @@ pub trait Error: Sized + StdError {
/// Some kind of std::io::Error occured.
fn io_error(err: std::io::Error) -> Self {
Self::custom(format_args!("There was an I/O error {}", err))
Self::custom(format_args!("There was an I/O error {err}"))
}
/// The data read from `NixRead` is invalid.
@ -41,7 +41,7 @@ pub trait Error: Sized + StdError {
impl Error for io::Error {
fn custom<T: fmt::Display>(msg: T) -> Self {
io::Error::new(io::ErrorKind::Other, msg.to_string())
io::Error::other(msg.to_string())
}
fn io_error(err: std::io::Error) -> Self {

View file

@ -215,7 +215,7 @@ impl Builder {
Operation::WriteDisplay(_, Err(Error::WrongWrite(op, OperationType::WriteDisplay))) => {
self.write_operation_type(*op)
}
s => panic!("Invalid operation {:?}", s),
s => panic!("Invalid operation {s:?}"),
}
}

View file

@ -19,7 +19,7 @@ pub trait Error: Sized + StdError {
fn custom<T: fmt::Display>(msg: T) -> Self;
fn io_error(err: std::io::Error) -> Self {
Self::custom(format_args!("There was an I/O error {}", err))
Self::custom(format_args!("There was an I/O error {err}"))
}
fn unsupported_data<T: fmt::Display>(msg: T) -> Self {
@ -33,7 +33,7 @@ pub trait Error: Sized + StdError {
impl Error for io::Error {
fn custom<T: fmt::Display>(msg: T) -> Self {
io::Error::new(io::ErrorKind::Other, msg.to_string())
io::Error::other(msg.to_string())
}
fn io_error(err: std::io::Error) -> Self {

View file

@ -203,7 +203,7 @@ where
}
let offset = self.buf.len();
self.buf.put_u64_le(0);
if let Err(err) = write!(self.buf, "{}", msg) {
if let Err(err) = write!(self.buf, "{msg}") {
self.buf.truncate(offset);
return Err(Self::Error::unsupported_data(err));
}

View file

@ -31,5 +31,5 @@ fn main() {
// Now you can use snix_serde to deserialise the struct:
let foods: Data = snix_serde::from_str(code).expect("deserialisation should succeed");
println!("These are the foods:\n{:#?}", foods);
println!("These are the foods:\n{foods:#?}");
}

View file

@ -29,6 +29,6 @@ fn main() {
match result {
Ok(cfg) => println!("Config says: {}:{}", cfg.host, cfg.port),
Err(e) => eprintln!("{:?} / {}", e, e),
Err(e) => eprintln!("{e:?} / {e}"),
}
}

View file

@ -41,16 +41,15 @@ impl Display for Error {
match self {
Error::Unserializable { value_type } => write!(
f,
"can not deserialise a Nix '{}' into a Rust type",
value_type
"can not deserialise a Nix '{value_type}' into a Rust type"
),
Error::Unsupported { wanted } => {
write!(f, "can not deserialize a '{}' from a Nix value", wanted)
write!(f, "can not deserialize a '{wanted}' from a Nix value")
}
Error::UnexpectedType { expected, got } => {
write!(f, "expected type {}, but got Nix type {}", expected, got)
write!(f, "expected type {expected}, but got Nix type {got}")
}
Error::NixErrors { errors } => {
@ -67,10 +66,10 @@ impl Display for Error {
Ok(())
}
Error::Deserialization(err) => write!(f, "deserialisation error occured: {}", err),
Error::Deserialization(err) => write!(f, "deserialisation error occured: {err}"),
Error::IntegerConversion { got, need } => {
write!(f, "i64({}) does not fit in a {}", got, need)
write!(f, "i64({got}) does not fit in a {need}")
}
Error::AmbiguousEnum => write!(f, "could not determine enum variant: ambiguous keys"),

View file

@ -287,7 +287,7 @@ async fn run_cli(
async move {
let span = Span::current();
span.pb_set_style(&snix_tracing::PB_SPINNER_STYLE);
span.pb_set_message(&format!("Ingesting {:?}", path));
span.pb_set_message(&format!("Ingesting {path:?}"));
span.pb_start();
// Ingest the contents at the given path into castore.
@ -300,7 +300,7 @@ async fn run_cli(
.await
.map_err(std::io::Error::custom)?;
span.pb_set_message(&format!("NAR Calculation for {:?}", path));
span.pb_set_message(&format!("NAR Calculation for {path:?}"));
// Ask for the NAR size and sha256
let (nar_size, nar_sha256) =

View file

@ -94,7 +94,7 @@ where
let root_node =
ingest_path::<_, _, _, &[u8]>(blob_service, directory_service, path.as_ref(), None)
.await
.map_err(|e| std::io::Error::new(std::io::ErrorKind::Other, e))?;
.map_err(std::io::Error::other)?;
// Ask for the NAR size and sha256
let (nar_size, nar_sha256) = nar_calculation_service.calculate_nar(&root_node).await?;
@ -108,7 +108,7 @@ where
store_path::build_ca_path(name, &ca, std::iter::empty::<&str>(), false).map_err(|_| {
std::io::Error::new(
std::io::ErrorKind::InvalidData,
format!("invalid name: {}", name),
format!("invalid name: {name}"),
)
})?;

View file

@ -39,7 +39,7 @@ where
self.directory_service.clone(),
)
.await
.map_err(|e| snix_castore::Error::StorageError(format!("failed rendering nar: {}", e)))
.map_err(|e| snix_castore::Error::StorageError(format!("failed rendering nar: {e}")))
}
}

View file

@ -245,7 +245,7 @@ impl<B: BlobService + 'static> tokio::io::AsyncSeek for Reader<B> {
let this = &mut *self;
if this.seeking {
return Err(io::Error::new(io::ErrorKind::Other, "Already seeking"));
return Err(io::Error::other("Already seeking"));
}
this.seeking = true;

View file

@ -200,7 +200,7 @@ impl PathInfoService for BigtablePathInfoService {
let mut response = client
.read_rows(request)
.await
.map_err(|e| Error::StorageError(format!("unable to read rows: {}", e)))?;
.map_err(|e| Error::StorageError(format!("unable to read rows: {e}")))?;
if response.len() != 1 {
if response.len() > 1 {
@ -241,7 +241,7 @@ impl PathInfoService for BigtablePathInfoService {
// Try to parse the value into a PathInfo message
let path_info_proto = proto::PathInfo::decode(Bytes::from(cell.value))
.map_err(|e| Error::StorageError(format!("unable to decode pathinfo proto: {}", e)))?;
.map_err(|e| Error::StorageError(format!("unable to decode pathinfo proto: {e}")))?;
let path_info = PathInfo::try_from(path_info_proto)
.map_err(|e| Error::StorageError(format!("Invalid path info: {e}")))?;
@ -294,7 +294,7 @@ impl PathInfoService for BigtablePathInfoService {
],
})
.await
.map_err(|e| Error::StorageError(format!("unable to mutate rows: {}", e)))?;
.map_err(|e| Error::StorageError(format!("unable to mutate rows: {e}")))?;
if resp.predicate_matched {
trace!("already existed")
@ -321,12 +321,12 @@ impl PathInfoService for BigtablePathInfoService {
let mut rows = client
.stream_rows(request)
.await
.map_err(|e| Error::StorageError(format!("unable to read rows: {}", e)))?.enumerate();
.map_err(|e| Error::StorageError(format!("unable to read rows: {e}")))?.enumerate();
use futures::stream::StreamExt;
while let Some((i, elem)) = rows.next().await {
let (row_key, mut cells) = elem.map_err(|e| Error::StorageError(format!("unable to stream row {}: {}", i, e)))?;
let (row_key, mut cells) = elem.map_err(|e| Error::StorageError(format!("unable to stream row {i}: {e}")))?;
let span = Span::current();
span.record("row.key", bstr::BStr::new(&row_key).to_string());
@ -351,7 +351,7 @@ impl PathInfoService for BigtablePathInfoService {
// Try to parse the value into a PathInfo message.
let path_info_proto = proto::PathInfo::decode(Bytes::from(cell.value))
.map_err(|e| Error::StorageError(format!("unable to decode pathinfo proto: {}", e)))?;
.map_err(|e| Error::StorageError(format!("unable to decode pathinfo proto: {e}")))?;
let path_info = PathInfo::try_from(path_info_proto).map_err(|e| Error::StorageError(format!("Invalid path info: {e}")))?;
@ -449,7 +449,7 @@ impl TryFrom<url::Url> for BigtableParameters {
.append_pair("instance_name", &instance_name);
let params: BigtableParameters = serde_qs::from_str(url.query().unwrap_or_default())
.map_err(|e| Error::InvalidRequest(format!("failed to parse parameters: {}", e)))?;
.map_err(|e| Error::InvalidRequest(format!("failed to parse parameters: {e}")))?;
Ok(params)
}

View file

@ -36,7 +36,7 @@ pub async fn from_addr(
) -> Result<Arc<dyn PathInfoService>, Box<dyn std::error::Error + Send + Sync>> {
#[allow(unused_mut)]
let mut url =
Url::parse(uri).map_err(|e| Error::StorageError(format!("unable to parse url: {}", e)))?;
Url::parse(uri).map_err(|e| Error::StorageError(format!("unable to parse url: {e}")))?;
let path_info_service_config = with_registry(&REG, || {
<DeserializeWithRegistry<Box<dyn ServiceBuilder<Output = dyn PathInfoService>>>>::try_from(

View file

@ -212,7 +212,7 @@ where
&narinfo.ca,
)
.await
.map_err(|e| io::Error::new(io::ErrorKind::Other, e))?;
.map_err(io::Error::other)?;
// ensure the ingested narhash and narsize do actually match.
if narinfo.nar_size != nar_size {

View file

@ -31,7 +31,7 @@ async fn single_file_missing_blob(
crate::nar::RenderError::NARWriterError(e) => {
assert_eq!(io::ErrorKind::NotFound, e.kind());
}
_ => panic!("unexpected error: {:?}", e),
_ => panic!("unexpected error: {e:?}"),
}
}
@ -60,10 +60,10 @@ async fn seekable(
(Ok(_), Err(_)) => panic!("creating reader should have failed but succeeded"),
(Ok(_), Ok(Err(_))) => panic!("creating reader should have failed but succeeded"),
(Err(err), Ok(Ok(_))) => {
panic!("creating reader should have succeeded but failed: {}", err)
panic!("creating reader should have succeeded but failed: {err}")
}
(Err(reader_err), Err(expected_err)) => {
assert_eq!(format!("{}", reader_err), format!("{}", expected_err));
assert_eq!(format!("{reader_err}"), format!("{}", expected_err));
}
(Err(reader_err), Ok(Err(expected_err))) => {
let crate::nar::RenderError::NARWriterError(e) = reader_err else {

View file

@ -30,9 +30,9 @@ async fn read_to_end(
match (reader_result, test_output) {
(Ok(_), Err(_)) => panic!("creating reader should have failed but succeeded"),
(Err(err), Ok(_)) => panic!("creating reader should have succeeded but failed: {}", err),
(Err(err), Ok(_)) => panic!("creating reader should have succeeded but failed: {err}"),
(Err(reader_err), Err(expected_err)) => {
assert_eq!(format!("{}", reader_err), format!("{}", expected_err));
assert_eq!(format!("{reader_err}"), format!("{}", expected_err));
}
(Ok(mut reader), Ok(expected_read_result)) => {
let mut buf: Vec<u8> = vec![];
@ -41,7 +41,7 @@ async fn read_to_end(
match (read_result, expected_read_result) {
(Ok(_), Err(_)) => panic!("read_to_end should have failed but succeeded"),
(Err(err), Ok(_)) => {
panic!("read_to_end should have succeeded but failed: {}", err)
panic!("read_to_end should have succeeded but failed: {err}")
}
(Err(read_err), Err(expected_read_err)) => {
assert_eq!(read_err.kind(), expected_read_err);