diff --git a/snix/build/src/buildservice/dummy.rs b/snix/build/src/buildservice/dummy.rs index 5d0e5163b..2d3864e7b 100644 --- a/snix/build/src/buildservice/dummy.rs +++ b/snix/build/src/buildservice/dummy.rs @@ -11,8 +11,7 @@ pub struct DummyBuildService {} impl BuildService for DummyBuildService { #[instrument(skip(self), ret, err)] async fn do_build(&self, _request: BuildRequest) -> std::io::Result { - Err(std::io::Error::new( - std::io::ErrorKind::Other, + Err(std::io::Error::other( "builds are not supported with DummyBuildService", )) } diff --git a/snix/build/src/buildservice/from_addr.rs b/snix/build/src/buildservice/from_addr.rs index 4f64b2d92..05a25ed23 100644 --- a/snix/build/src/buildservice/from_addr.rs +++ b/snix/build/src/buildservice/from_addr.rs @@ -24,8 +24,8 @@ where BS: BlobService + Send + Sync + Clone + 'static, DS: DirectoryService + Send + Sync + Clone + 'static, { - let url = Url::parse(uri) - .map_err(|e| std::io::Error::other(format!("unable to parse url: {}", e)))?; + let url = + Url::parse(uri).map_err(|e| std::io::Error::other(format!("unable to parse url: {e}")))?; Ok(match url.scheme() { // dummy doesn't care about parameters. diff --git a/snix/build/src/buildservice/oci.rs b/snix/build/src/buildservice/oci.rs index fcd648c2a..ae004fa1c 100644 --- a/snix/build/src/buildservice/oci.rs +++ b/snix/build/src/buildservice/oci.rs @@ -132,10 +132,7 @@ where warn!(stdout=%stdout, stderr=%stderr, exit_code=%child_output.status, "build failed"); - return Err(std::io::Error::new( - std::io::ErrorKind::Other, - "nonzero exit code".to_string(), - )); + return Err(std::io::Error::other("nonzero exit code".to_string())); } // Ingest build outputs into the castore. @@ -161,7 +158,7 @@ where .map_err(|e| { std::io::Error::new( std::io::ErrorKind::InvalidData, - format!("Unable to ingest output: {}", e), + format!("Unable to ingest output: {e}"), ) })?, diff --git a/snix/build/src/oci/spec.rs b/snix/build/src/oci/spec.rs index 557cf38cc..3a3e54d8c 100644 --- a/snix/build/src/oci/spec.rs +++ b/snix/build/src/oci/spec.rs @@ -125,7 +125,7 @@ fn configure_process<'a>( .args(command_args) .env( env.into_iter() - .map(|(k, v)| format!("{}={}", k, v)) + .map(|(k, v)| format!("{k}={v}")) .collect::>(), ) .terminal(true) diff --git a/snix/build/src/oci/subuid.rs b/snix/build/src/oci/subuid.rs index c78d5c000..71d7b0068 100644 --- a/snix/build/src/oci/subuid.rs +++ b/snix/build/src/oci/subuid.rs @@ -115,7 +115,7 @@ mod tests { use std::io::Write; let mut file = tempfile::NamedTempFile::new().expect("Could not create tempfile"); for line in content.into_iter() { - writeln!(file, "{}", line).expect(""); + writeln!(file, "{line}").expect(""); } file } diff --git a/snix/castore/src/blobservice/from_addr.rs b/snix/castore/src/blobservice/from_addr.rs index aad50d3df..2285becc3 100644 --- a/snix/castore/src/blobservice/from_addr.rs +++ b/snix/castore/src/blobservice/from_addr.rs @@ -20,7 +20,7 @@ pub async fn from_addr( uri: &str, ) -> Result, Box> { let url = Url::parse(uri) - .map_err(|e| crate::Error::StorageError(format!("unable to parse url: {}", e)))?; + .map_err(|e| crate::Error::StorageError(format!("unable to parse url: {e}")))?; let blob_service_config = with_registry(®, || { >>>::try_from(url) diff --git a/snix/castore/src/blobservice/grpc.rs b/snix/castore/src/blobservice/grpc.rs index 283bda6b5..672a130a4 100644 --- a/snix/castore/src/blobservice/grpc.rs +++ b/snix/castore/src/blobservice/grpc.rs @@ -64,7 +64,7 @@ where { Ok(_blob_meta) => Ok(true), Err(e) if e.code() == 
Code::NotFound => Ok(false), - Err(e) => Err(io::Error::new(io::ErrorKind::Other, e)), + Err(e) => Err(io::Error::other(e)), } } @@ -106,7 +106,7 @@ where Ok(Some(Box::new(Cursor::new(buf)))) } Err(e) if e.code() == Code::NotFound => Ok(None), - Err(e) => Err(io::Error::new(io::ErrorKind::Other, e)), + Err(e) => Err(io::Error::other(e)), }; } @@ -175,7 +175,7 @@ where match resp { Err(e) if e.code() == Code::NotFound => Ok(None), - Err(e) => Err(io::Error::new(io::ErrorKind::Other, e)), + Err(e) => Err(io::Error::other(e)), Ok(resp) => { let resp = resp.into_inner(); @@ -259,15 +259,14 @@ impl BlobWriter for GR // return the digest from the response, and store it in self.digest for subsequent closes. let digest_len = resp.digest.len(); let digest: B3Digest = resp.digest.try_into().map_err(|_| { - io::Error::new( - io::ErrorKind::Other, - format!("invalid root digest length {} in response", digest_len), - ) + io::Error::other(format!( + "invalid root digest length {digest_len} in response" + )) })?; self.digest = Some(digest.clone()); Ok(digest) } - Err(e) => Err(io::Error::new(io::ErrorKind::Other, e.to_string())), + Err(e) => Err(io::Error::other(e.to_string())), } } } diff --git a/snix/castore/src/blobservice/tests/mod.rs b/snix/castore/src/blobservice/tests/mod.rs index d6222a703..ed48432f9 100644 --- a/snix/castore/src/blobservice/tests/mod.rs +++ b/snix/castore/src/blobservice/tests/mod.rs @@ -98,7 +98,7 @@ async fn put_has_get(blob_service: impl BlobService) { "blob service should now have the blob" ); - let mut r = blob_service + let r = blob_service .open_read(blob_digest) .await .expect("open_read must succeed") diff --git a/snix/castore/src/composition.rs b/snix/castore/src/composition.rs index 1ce62ed15..11ed28cbf 100644 --- a/snix/castore/src/composition.rs +++ b/snix/castore/src/composition.rs @@ -155,7 +155,7 @@ impl<'r, 'de: 'r, T: 'static> SeedFactory<'de, TagString<'de>> for RegistryWithF .0 .iter() .find(|(k, _)| *k == &(TypeId::of::(), tag.as_ref())) - .ok_or_else(|| serde::de::Error::custom(format!("Unknown type: {}", tag)))? + .ok_or_else(|| serde::de::Error::custom(format!("Unknown type: {tag}")))? .1; let entry: &RegistryEntry = ::downcast_ref(&**seed).unwrap(); diff --git a/snix/castore/src/directoryservice/bigtable.rs b/snix/castore/src/directoryservice/bigtable.rs index 1faf9349e..8b3b2dfdb 100644 --- a/snix/castore/src/directoryservice/bigtable.rs +++ b/snix/castore/src/directoryservice/bigtable.rs @@ -202,7 +202,7 @@ impl DirectoryService for BigtableDirectoryService { let mut response = client .read_rows(request) .await - .map_err(|e| Error::StorageError(format!("unable to read rows: {}", e)))?; + .map_err(|e| Error::StorageError(format!("unable to read rows: {e}")))?; if response.len() != 1 { if response.len() > 1 { @@ -244,17 +244,14 @@ impl DirectoryService for BigtableDirectoryService { // For the data in that cell, ensure the digest matches what's requested, before parsing. let got_digest = B3Digest::from(blake3::hash(&row_cell.value).as_bytes()); if got_digest != *digest { - return Err(Error::StorageError(format!( - "invalid digest: {}", - got_digest - ))); + return Err(Error::StorageError(format!("invalid digest: {got_digest}"))); } // Try to parse the value into a Directory message. let directory = proto::Directory::decode(Bytes::from(row_cell.value)) - .map_err(|e| Error::StorageError(format!("unable to decode directory proto: {}", e)))? + .map_err(|e| Error::StorageError(format!("unable to decode directory proto: {e}")))? 
.try_into() - .map_err(|e| Error::StorageError(format!("invalid Directory message: {}", e)))?; + .map_err(|e| Error::StorageError(format!("invalid Directory message: {e}")))?; Ok(Some(directory)) } @@ -301,7 +298,7 @@ impl DirectoryService for BigtableDirectoryService { ], }) .await - .map_err(|e| Error::StorageError(format!("unable to mutate rows: {}", e)))?; + .map_err(|e| Error::StorageError(format!("unable to mutate rows: {e}")))?; if resp.predicate_matched { trace!("already existed") @@ -376,7 +373,7 @@ impl TryFrom for BigtableParameters { .append_pair("instance_name", &instance_name); let params: BigtableParameters = serde_qs::from_str(url.query().unwrap_or_default()) - .map_err(|e| Error::InvalidRequest(format!("failed to parse parameters: {}", e)))?; + .map_err(|e| Error::InvalidRequest(format!("failed to parse parameters: {e}")))?; Ok(params) } diff --git a/snix/castore/src/directoryservice/from_addr.rs b/snix/castore/src/directoryservice/from_addr.rs index 441da56af..7bb0d4015 100644 --- a/snix/castore/src/directoryservice/from_addr.rs +++ b/snix/castore/src/directoryservice/from_addr.rs @@ -28,7 +28,7 @@ pub async fn from_addr( ) -> Result, Box> { #[allow(unused_mut)] let mut url = Url::parse(uri) - .map_err(|e| crate::Error::StorageError(format!("unable to parse url: {}", e)))?; + .map_err(|e| crate::Error::StorageError(format!("unable to parse url: {e}")))?; let directory_service_config = with_registry(®, || { >>>::try_from( diff --git a/snix/castore/src/directoryservice/grpc.rs b/snix/castore/src/directoryservice/grpc.rs index 700aea6fc..340764881 100644 --- a/snix/castore/src/directoryservice/grpc.rs +++ b/snix/castore/src/directoryservice/grpc.rs @@ -71,8 +71,7 @@ where let actual_digest = directory.digest(); if actual_digest != digest { Err(crate::Error::StorageError(format!( - "requested directory with digest {}, but got {}", - digest, actual_digest + "requested directory with digest {digest}, but got {actual_digest}" ))) } else { Ok(Some(directory.try_into().map_err(|_| { @@ -141,8 +140,7 @@ where // it if it's in received_directory_digests (as that // means it once was in expected_directory_digests) Err(crate::Error::StorageError(format!( - "received unexpected directory {}", - directory_digest + "received unexpected directory {directory_digest}" )))?; } received_directory_digests.insert(directory_digest); @@ -177,8 +175,7 @@ where // If this is not empty, then the closure is incomplete if diff_len != 0 { Err(crate::Error::StorageError(format!( - "still expected {} directories, but got premature end of stream", - diff_len + "still expected {diff_len} directories, but got premature end of stream" )))? 
} else { return diff --git a/snix/castore/src/directoryservice/memory.rs b/snix/castore/src/directoryservice/memory.rs index e6fdb2570..2d383a177 100644 --- a/snix/castore/src/directoryservice/memory.rs +++ b/snix/castore/src/directoryservice/memory.rs @@ -34,13 +34,12 @@ impl DirectoryService for MemoryDirectoryService { let actual_digest = directory.digest(); if actual_digest != *digest { return Err(Error::StorageError(format!( - "requested directory with digest {}, but got {}", - digest, actual_digest + "requested directory with digest {digest}, but got {actual_digest}" ))); } Ok(Some(directory.clone().try_into().map_err(|e| { - crate::Error::StorageError(format!("corrupted directory: {}", e)) + crate::Error::StorageError(format!("corrupted directory: {e}")) })?)) } } diff --git a/snix/castore/src/directoryservice/object_store.rs b/snix/castore/src/directoryservice/object_store.rs index da59eea49..1cf83f795 100644 --- a/snix/castore/src/directoryservice/object_store.rs +++ b/snix/castore/src/directoryservice/object_store.rs @@ -152,8 +152,7 @@ impl DirectoryService for ObjectStoreDirectoryService { // Ensure to only decode the directory objects whose digests we trust if !order_validator.digest_allowed(&digest) { return Err(crate::Error::StorageError(format!( - "received unexpected directory {}", - digest + "received unexpected directory {digest}" ))); } diff --git a/snix/castore/src/directoryservice/tests/mod.rs b/snix/castore/src/directoryservice/tests/mod.rs index d394a5679..6bc2eb115 100644 --- a/snix/castore/src/directoryservice/tests/mod.rs +++ b/snix/castore/src/directoryservice/tests/mod.rs @@ -167,7 +167,7 @@ async fn put_get_foo(directory_service: impl DirectoryService) { ], ]; if !valid_closures.contains(&retrieved_closure) { - panic!("invalid closure returned: {:?}", retrieved_closure); + panic!("invalid closure returned: {retrieved_closure:?}"); } } diff --git a/snix/castore/src/directoryservice/traverse.rs b/snix/castore/src/directoryservice/traverse.rs index 960f809ba..dc02468e8 100644 --- a/snix/castore/src/directoryservice/traverse.rs +++ b/snix/castore/src/directoryservice/traverse.rs @@ -26,7 +26,7 @@ where // If we didn't get the directory node that's linked, that's a store inconsistency, bail out! warn!(directory.digest = %digest, "directory does not exist"); - Error::StorageError(format!("directory {} does not exist", digest)) + Error::StorageError(format!("directory {digest} does not exist")) })?; // look for the component in the [Directory]. diff --git a/snix/castore/src/directoryservice/utils.rs b/snix/castore/src/directoryservice/utils.rs index d073c2c3c..20ea83273 100644 --- a/snix/castore/src/directoryservice/utils.rs +++ b/snix/castore/src/directoryservice/utils.rs @@ -33,8 +33,7 @@ pub fn traverse_directory<'a, DS: DirectoryService + 'static>( let current_directory = match directory_service.get(¤t_directory_digest).await.map_err(|e| { warn!("failed to look up directory"); Error::StorageError(format!( - "unable to look up directory {}: {}", - current_directory_digest, e + "unable to look up directory {current_directory_digest}: {e}" )) })? 
{ // the root node of the requested closure was not found, return an empty list @@ -43,8 +42,7 @@ pub fn traverse_directory<'a, DS: DirectoryService + 'static>( None => { warn!("directory {} does not exist", current_directory_digest); Err(Error::StorageError(format!( - "directory {} does not exist", - current_directory_digest + "directory {current_directory_digest} does not exist" )))?; break; } diff --git a/snix/castore/src/errors.rs b/snix/castore/src/errors.rs index cb0b3966d..84567fbc3 100644 --- a/snix/castore/src/errors.rs +++ b/snix/castore/src/errors.rs @@ -78,7 +78,7 @@ impl From for Status { fn from(value: Error) -> Self { match value { Error::InvalidRequest(msg) => Status::invalid_argument(msg), - Error::StorageError(msg) => Status::data_loss(format!("storage error: {}", msg)), + Error::StorageError(msg) => Status::data_loss(format!("storage error: {msg}")), } } } @@ -140,7 +140,7 @@ impl From for std::io::Error { fn from(value: Error) -> Self { match value { Error::InvalidRequest(msg) => Self::new(std::io::ErrorKind::InvalidInput, msg), - Error::StorageError(msg) => Self::new(std::io::ErrorKind::Other, msg), + Error::StorageError(msg) => Self::other(msg), } } } diff --git a/snix/castore/src/fs/fuse/mod.rs b/snix/castore/src/fs/fuse/mod.rs index aab3cb0bc..cfc313a4d 100644 --- a/snix/castore/src/fs/fuse/mod.rs +++ b/snix/castore/src/fs/fuse/mod.rs @@ -76,13 +76,13 @@ impl FuseDaemon { let server = Arc::new(fuse_backend_rs::api::server::Server::new(Arc::new(fs))); let mut session = FuseSession::new(mountpoint.as_ref(), "snix-castore", "", true) - .map_err(|e| io::Error::new(io::ErrorKind::Other, e.to_string()))?; + .map_err(|e| io::Error::other(e.to_string()))?; #[cfg(target_os = "linux")] session.set_allow_other(allow_other); session .mount() - .map_err(|e| io::Error::new(io::ErrorKind::Other, e.to_string()))?; + .map_err(|e| io::Error::other(e.to_string()))?; // construct a thread pool let threads = threadpool::Builder::new() @@ -99,7 +99,7 @@ impl FuseDaemon { server: server.clone(), channel: session .new_channel() - .map_err(|e| io::Error::new(io::ErrorKind::Other, e.to_string()))?, + .map_err(|e| io::Error::other(e.to_string()))?, }; // Start the FuseServer in each thread, and enter the tokio runtime context, @@ -131,7 +131,7 @@ impl FuseDaemon { self.session .lock() .umount() - .map_err(|e| io::Error::new(io::ErrorKind::Other, e.to_string()))?; + .map_err(|e| io::Error::other(e.to_string()))?; self.wait(); Ok(()) diff --git a/snix/castore/src/fs/virtiofs.rs b/snix/castore/src/fs/virtiofs.rs index 1461bdbc8..481b34db8 100644 --- a/snix/castore/src/fs/virtiofs.rs +++ b/snix/castore/src/fs/virtiofs.rs @@ -54,7 +54,7 @@ impl error::Error for Error {} impl convert::From for io::Error { fn from(e: Error) -> Self { - io::Error::new(io::ErrorKind::Other, e) + io::Error::other(e) } } @@ -195,7 +195,7 @@ where queue .get_queue_mut() .enable_notification(self.guest_mem.memory().deref()) - .map_err(|e| io::Error::new(io::ErrorKind::Other, e.to_string()))?; + .map_err(|e| io::Error::other(e.to_string()))?; if !self.process_queue(&mut queue)? 
{ break; } diff --git a/snix/castore/src/import/fs.rs b/snix/castore/src/import/fs.rs index f7b379203..bc414b885 100644 --- a/snix/castore/src/import/fs.rs +++ b/snix/castore/src/import/fs.rs @@ -143,7 +143,7 @@ where // convert to castore PathBuf let path = crate::path::PathBuf::from_host_path(fs_path, false) - .unwrap_or_else(|e| panic!("Snix bug: walkdir direntry cannot be parsed: {}", e)); + .unwrap_or_else(|e| panic!("Snix bug: walkdir direntry cannot be parsed: {e}")); if file_type.is_dir() { Ok(IngestionEntry::Dir { path }) @@ -166,7 +166,7 @@ where let digest = upload_blob(blob_service, entry.path().to_path_buf(), reference_scanner) .instrument({ let span = info_span!("upload_blob", "indicatif.pb_show" = tracing::field::Empty); - span.pb_set_message(&format!("Uploading blob for {:?}", fs_path)); + span.pb_set_message(&format!("Uploading blob for {fs_path:?}")); span.pb_set_style(&snix_tracing::PB_TRANSFER_STYLE); span diff --git a/snix/castore/src/import/mod.rs b/snix/castore/src/import/mod.rs index 438b12fc6..da0837ec2 100644 --- a/snix/castore/src/import/mod.rs +++ b/snix/castore/src/import/mod.rs @@ -98,7 +98,7 @@ where Err(e) => { return Err(IngestionError::UploadDirectoryError( path, - crate::Error::StorageError(format!("invalid symlink target: {}", e)), + crate::Error::StorageError(format!("invalid symlink target: {e}")), )); } } diff --git a/snix/castore/src/proto/grpc_directoryservice_wrapper.rs b/snix/castore/src/proto/grpc_directoryservice_wrapper.rs index f7df6ed66..94c0d2ad4 100644 --- a/snix/castore/src/proto/grpc_directoryservice_wrapper.rs +++ b/snix/castore/src/proto/grpc_directoryservice_wrapper.rs @@ -53,7 +53,7 @@ where tonic::Status::new(tonic::Code::Internal, e.to_string()) })? .ok_or_else(|| { - Status::not_found(format!("directory {} not found", digest)) + Status::not_found(format!("directory {digest} not found")) })?; Box::pin(once(Ok(directory.into()))) diff --git a/snix/cli/src/repl.rs b/snix/cli/src/repl.rs index e6b261172..05594bcb2 100644 --- a/snix/cli/src/repl.rs +++ b/snix/cli/src/repl.rs @@ -155,7 +155,7 @@ impl<'a> Repl<'a> { Err(ReadlineError::Interrupted) | Err(ReadlineError::Eof) => break, Err(err) => { - writeln!(stderr, "error: {}", err).unwrap(); + writeln!(stderr, "error: {err}").unwrap(); break; } } diff --git a/snix/eval/src/builtins/mod.rs b/snix/eval/src/builtins/mod.rs index 127368565..a71b20f72 100644 --- a/snix/eval/src/builtins/mod.rs +++ b/snix/eval/src/builtins/mod.rs @@ -542,7 +542,7 @@ mod pure_builtins { let len = length.as_int()?; let mut out = Vec::with_capacity( len.try_into() - .map_err(|_| ErrorKind::Abort(format!("can not create list of size {}", len)))?, + .map_err(|_| ErrorKind::Abort(format!("can not create list of size {len}")))?, ); // the best span we can get… @@ -655,17 +655,17 @@ mod pure_builtins { for ctx_element in group { match ctx_element { NixContextElement::Plain(spath) => { - debug_assert!(spath == key, "Unexpected group containing mixed keys, expected: {:?}, encountered {:?}", key, spath); + debug_assert!(spath == key, "Unexpected group containing mixed keys, expected: {key:?}, encountered {spath:?}"); is_path = true; } NixContextElement::Single { name, derivation } => { - debug_assert!(derivation == key, "Unexpected group containing mixed keys, expected: {:?}, encountered {:?}", key, derivation); + debug_assert!(derivation == key, "Unexpected group containing mixed keys, expected: {key:?}, encountered {derivation:?}"); outputs.push(name.clone().into()); } NixContextElement::Derivation(drv_path) => { - 
debug_assert!(drv_path == key, "Unexpected group containing mixed keys, expected: {:?}, encountered {:?}", key, drv_path); + debug_assert!(drv_path == key, "Unexpected group containing mixed keys, expected: {key:?}, encountered {drv_path:?}"); all_outputs = true; } } @@ -992,7 +992,7 @@ mod pure_builtins { } let re = re.to_str()?; let re = re.to_str()?; - let re: Regex = cached_regex(&format!("^{}$", re)) + let re: Regex = cached_regex(&format!("^{re}$")) .map_err(|_| ErrorKind::InvalidRegex(re.to_string()))?; match re.captures(s.to_str()?) { diff --git a/snix/eval/src/builtins/to_xml.rs b/snix/eval/src/builtins/to_xml.rs index 46a711a4c..02c04992e 100644 --- a/snix/eval/src/builtins/to_xml.rs +++ b/snix/eval/src/builtins/to_xml.rs @@ -298,15 +298,15 @@ mod tests { fn xml_escape() { match XmlEmitter::>>::escape_attr_value("ab<>c&de") { Cow::Owned(s) => assert_eq!(s, "ab<>c&de".to_string(), "escape stuff"), - Cow::Borrowed(s) => panic!("s should be owned {}", s), + Cow::Borrowed(s) => panic!("s should be owned {s}"), } match XmlEmitter::>>::escape_attr_value("") { Cow::Borrowed(s) => assert_eq!(s, "", "empty escape is borrowed"), - Cow::Owned(s) => panic!("s should be borrowed {}", s), + Cow::Owned(s) => panic!("s should be borrowed {s}"), } match XmlEmitter::>>::escape_attr_value("hi!ŷbla") { Cow::Borrowed(s) => assert_eq!(s, "hi!ŷbla", "no escape is borrowed"), - Cow::Owned(s) => panic!("s should be borrowed {}", s), + Cow::Owned(s) => panic!("s should be borrowed {s}"), } match XmlEmitter::>>::escape_attr_value("hi!<ŷ>bla") { Cow::Owned(s) => assert_eq!( @@ -314,7 +314,7 @@ mod tests { "hi!<ŷ>bla".to_string(), "multi-byte chars are correctly used" ), - Cow::Borrowed(s) => panic!("s should be owned {}", s), + Cow::Borrowed(s) => panic!("s should be owned {s}"), } } } diff --git a/snix/eval/src/chunk.rs b/snix/eval/src/chunk.rs index 548bfb3b7..487d14567 100644 --- a/snix/eval/src/chunk.rs +++ b/snix/eval/src/chunk.rs @@ -172,40 +172,40 @@ impl Chunk { if idx.0 > 0 && source.get_line(self.get_span(idx - 1)) == line { write!(writer, " |\t")?; } else { - write!(writer, "{:4}\t", line)?; + write!(writer, "{line:4}\t")?; } let _fmt_constant = |idx: ConstantIdx| match &self.constants[idx.0] { Value::Thunk(t) => t.debug_repr(), Value::Closure(c) => format!("closure({:p})", c.lambda), - Value::Blueprint(b) => format!("blueprint({:p})", b), - val => format!("{}", val), + Value::Blueprint(b) => format!("blueprint({b:p})"), + val => format!("{val}"), }; let op: Op = self.code[idx.0].into(); match op.arg_type() { OpArg::None => { - writeln!(writer, "Op{:?}", op)?; + writeln!(writer, "Op{op:?}")?; Ok(1) } OpArg::Fixed => { let arg = self.read_u16(idx.0 + 1); - writeln!(writer, "Op{:?}({})", op, arg)?; + writeln!(writer, "Op{op:?}({arg})")?; Ok(3) } OpArg::Uvarint => { let (arg, size) = self.read_uvarint(idx.0 + 1); - writeln!(writer, "Op{:?}({})", op, arg)?; + writeln!(writer, "Op{op:?}({arg})")?; Ok(1 + size) } _ => match op { Op::CoerceToString => { let kind: CoercionKind = self.code[idx.0 + 1].into(); - writeln!(writer, "Op{:?}({:?})", op, kind)?; + writeln!(writer, "Op{op:?}({kind:?})")?; Ok(2) } @@ -221,11 +221,11 @@ impl Chunk { let captures_with = packed_count & 0b1 == 1; let count = packed_count >> 1; - write!(writer, "Op{:?}(BP @ {}, ", op, bp_idx)?; + write!(writer, "Op{op:?}(BP @ {bp_idx}, ")?; if captures_with { write!(writer, "captures with, ")?; } - writeln!(writer, "{} upvalues)", count)?; + writeln!(writer, "{count} upvalues)")?; for _ in 0..count { let (_, size) = 
self.read_uvarint(cidx); @@ -234,7 +234,7 @@ impl Chunk { Ok(cidx - idx.0) } - _ => panic!("Snix bug: don't know how to format argument for Op{:?}", op), + _ => panic!("Snix bug: don't know how to format argument for Op{op:?}"), }, } } diff --git a/snix/eval/src/compiler/mod.rs b/snix/eval/src/compiler/mod.rs index 7dc1b9712..f86889758 100644 --- a/snix/eval/src/compiler/mod.rs +++ b/snix/eval/src/compiler/mod.rs @@ -196,8 +196,7 @@ impl<'source, 'observer> Compiler<'source, 'observer> { let current_dir = std::env::current_dir().map_err(|e| { Error::new( ErrorKind::RelativePathResolution(format!( - "could not determine current directory: {}", - e + "could not determine current directory: {e}" )), file.span, source.clone(), @@ -1527,15 +1526,15 @@ fn compile_src_builtin( let parsed = rnix::ast::Root::parse(code); if !parsed.errors().is_empty() { - let mut out = format!("BUG: code for source-builtin '{}' had parser errors", name); + let mut out = format!("BUG: code for source-builtin '{name}' had parser errors"); for error in parsed.errors() { - writeln!(out, "{}", error).unwrap(); + writeln!(out, "{error}").unwrap(); } panic!("{}", out); } - let file = source.add_file(format!("", name), code.to_string()); + let file = source.add_file(format!(""), code.to_string()); let weak = weak.clone(); Value::Thunk(Thunk::new_suspended_native(Box::new(move || { @@ -1555,7 +1554,7 @@ fn compile_src_builtin( if !result.errors.is_empty() { return Err(ErrorKind::ImportCompilerError { - path: format!("src-builtins/{}.nix", name).into(), + path: format!("src-builtins/{name}.nix").into(), errors: result.errors, }); } diff --git a/snix/eval/src/errors.rs b/snix/eval/src/errors.rs index 36d4a92d7..4f3c40b4f 100644 --- a/snix/eval/src/errors.rs +++ b/snix/eval/src/errors.rs @@ -253,7 +253,7 @@ to a missing value in the attribute set(s) included via `with`."# let mut disp = format!("Snix bug: {}", .msg); if let Some(metadata) = .metadata { - disp.push_str(&format!("; metadata: {:?}", metadata)); + disp.push_str(&format!("; metadata: {metadata:?}")); } disp @@ -588,10 +588,7 @@ fn spans_for_parse_errors(file: &File, errors: &[rnix::parser::ParseError]) -> V rnix::parser::ParseError::DuplicatedArgs(range, name) => ( range.span_for(file), - format!( - "the function argument pattern '{}' was bound more than once", - name - ), + format!("the function argument pattern '{name}' was bound more than once"), ), rnix::parser::ParseError::RecursionLimitExceeded => ( @@ -843,7 +840,7 @@ impl Error { for ctx in &self.contexts { spans.push(SpanLabel { - label: Some(format!("while {}", ctx)), + label: Some(format!("while {ctx}")), span: self.span, style: SpanStyle::Secondary, }); diff --git a/snix/eval/src/io.rs b/snix/eval/src/io.rs index cf1558fd6..5aef7f931 100644 --- a/snix/eval/src/io.rs +++ b/snix/eval/src/io.rs @@ -44,7 +44,7 @@ impl std::fmt::Display for FileType { FileType::Unknown => "unknown", }; - write!(f, "{}", type_as_str) + write!(f, "{type_as_str}") } } diff --git a/snix/eval/src/observer.rs b/snix/eval/src/observer.rs index 654d0fbc2..f78ea33e5 100644 --- a/snix/eval/src/observer.rs +++ b/snix/eval/src/observer.rs @@ -182,7 +182,7 @@ impl TracingObserver { } // For other value types, defer to the standard value printer. 
- _ => write!(&mut self.writer, "{} ", val), + _ => write!(&mut self.writer, "{val} "), }; } @@ -222,7 +222,7 @@ impl RuntimeObserver for TracingObserver { }; if let Some(name) = &lambda.name { - let _ = write!(&mut self.writer, "'{}' ", name); + let _ = write!(&mut self.writer, "'{name}' "); } let _ = writeln!( @@ -235,13 +235,13 @@ impl RuntimeObserver for TracingObserver { /// Called when the runtime exits a call frame. fn observe_exit_call_frame(&mut self, frame_at: usize, stack: &[Value]) { self.maybe_write_time(); - let _ = write!(&mut self.writer, "=== exiting frame {} ===\t ", frame_at); + let _ = write!(&mut self.writer, "=== exiting frame {frame_at} ===\t "); self.write_stack(stack); } fn observe_suspend_call_frame(&mut self, frame_at: usize, stack: &[Value]) { self.maybe_write_time(); - let _ = write!(&mut self.writer, "=== suspending frame {} ===\t", frame_at); + let _ = write!(&mut self.writer, "=== suspending frame {frame_at} ===\t"); self.write_stack(stack); } @@ -250,8 +250,7 @@ impl RuntimeObserver for TracingObserver { self.maybe_write_time(); let _ = write!( &mut self.writer, - "=== entering generator frame '{}' [{}] ===\t", - name, frame_at, + "=== entering generator frame '{name}' [{frame_at}] ===\t", ); self.write_stack(stack); @@ -261,8 +260,7 @@ impl RuntimeObserver for TracingObserver { self.maybe_write_time(); let _ = write!( &mut self.writer, - "=== exiting generator '{}' [{}] ===\t", - name, frame_at + "=== exiting generator '{name}' [{frame_at}] ===\t" ); self.write_stack(stack); @@ -272,8 +270,7 @@ impl RuntimeObserver for TracingObserver { self.maybe_write_time(); let _ = write!( &mut self.writer, - "=== suspending generator '{}' [{}] ===\t", - name, frame_at + "=== suspending generator '{name}' [{frame_at}] ===\t" ); self.write_stack(stack); @@ -283,19 +280,18 @@ impl RuntimeObserver for TracingObserver { self.maybe_write_time(); let _ = writeln!( &mut self.writer, - "=== generator '{}' requested {} ===", - name, msg + "=== generator '{name}' requested {msg} ===" ); } fn observe_enter_builtin(&mut self, name: &'static str) { self.maybe_write_time(); - let _ = writeln!(&mut self.writer, "=== entering builtin {} ===", name); + let _ = writeln!(&mut self.writer, "=== entering builtin {name} ==="); } fn observe_exit_builtin(&mut self, name: &'static str, stack: &[Value]) { self.maybe_write_time(); - let _ = write!(&mut self.writer, "=== exiting builtin {} ===\t", name); + let _ = write!(&mut self.writer, "=== exiting builtin {name} ===\t"); self.write_stack(stack); } diff --git a/snix/eval/src/value/attrs/tests.rs b/snix/eval/src/value/attrs/tests.rs index e8798797f..743d5fe48 100644 --- a/snix/eval/src/value/attrs/tests.rs +++ b/snix/eval/src/value/attrs/tests.rs @@ -50,10 +50,7 @@ fn test_kv_attrs() { if name.to_str().unwrap() == meaning_val.to_str().unwrap() || value.to_str().unwrap() == forty_two_val.to_str().unwrap() => {} - _ => panic!( - "K/V attribute set should use optimised representation, but got {:?}", - kv_attrs - ), + _ => panic!("K/V attribute set should use optimised representation, but got {kv_attrs:?}"), } } diff --git a/snix/eval/src/value/mod.rs b/snix/eval/src/value/mod.rs index feb2508be..bb18c6088 100644 --- a/snix/eval/src/value/mod.rs +++ b/snix/eval/src/value/mod.rs @@ -410,7 +410,7 @@ impl Value { (Value::Float(f), CoercionKind { strong: true, .. 
}) => { // contrary to normal Display, coercing a float to a string will // result in unconditional 6 decimal places - Ok(format!("{:.6}", f).into()) + Ok(format!("{f:.6}").into()) } // Lists are coerced by coercing their elements and interspersing spaces @@ -842,18 +842,18 @@ impl Value { pub fn explain(&self) -> String { match self { Value::Null => "the 'null' value".into(), - Value::Bool(b) => format!("the boolean value '{}'", b), - Value::Integer(i) => format!("the integer '{}'", i), - Value::Float(f) => format!("the float '{}'", f), - Value::String(s) if s.has_context() => format!("the contextful string '{}'", s), - Value::String(s) => format!("the contextless string '{}'", s), + Value::Bool(b) => format!("the boolean value '{b}'"), + Value::Integer(i) => format!("the integer '{i}'"), + Value::Float(f) => format!("the float '{f}'"), + Value::String(s) if s.has_context() => format!("the contextful string '{s}'"), + Value::String(s) => format!("the contextless string '{s}'"), Value::Path(p) => format!("the path '{}'", p.to_string_lossy()), Value::Attrs(attrs) => format!("a {}-item attribute set", attrs.len()), Value::List(list) => format!("a {}-item list", list.len()), Value::Closure(f) => { if let Some(name) = &f.lambda.name { - format!("the user-defined Nix function '{}'", name) + format!("the user-defined Nix function '{name}'") } else { "a user-defined Nix function".to_string() } @@ -977,7 +977,7 @@ impl TotalDisplay for Value { Value::Null => f.write_str("null"), Value::Bool(true) => f.write_str("true"), Value::Bool(false) => f.write_str("false"), - Value::Integer(num) => write!(f, "{}", num), + Value::Integer(num) => write!(f, "{num}"), Value::String(s) => s.fmt(f), Value::Path(p) => p.display().fmt(f), Value::Attrs(attrs) => attrs.total_fmt(f, set), diff --git a/snix/eval/src/value/string/mod.rs b/snix/eval/src/value/string/mod.rs index 17f2c4c44..a03ea1505 100644 --- a/snix/eval/src/value/string/mod.rs +++ b/snix/eval/src/value/string/mod.rs @@ -656,13 +656,13 @@ impl NixString { if is_valid_nix_identifier(&escaped) && !is_keyword(&escaped) { escaped } else { - Cow::Owned(format!("\"{}\"", escaped)) + Cow::Owned(format!("\"{escaped}\"")) } } // An owned string has escapes, and needs the outer quotes // for display. - Cow::Owned(s) => Cow::Owned(format!("\"{}\"", s)), + Cow::Owned(s) => Cow::Owned(format!("\"{s}\"")), } } diff --git a/snix/eval/src/value/thunk.rs b/snix/eval/src/value/thunk.rs index 1ba294c3e..38392ef16 100644 --- a/snix/eval/src/value/thunk.rs +++ b/snix/eval/src/value/thunk.rs @@ -87,7 +87,7 @@ enum ThunkRepr { impl ThunkRepr { fn debug_repr(&self) -> String { match self { - ThunkRepr::Evaluated(v) => format!("thunk(val|{})", v), + ThunkRepr::Evaluated(v) => format!("thunk(val|{v})"), ThunkRepr::Blackhole { .. } => "thunk(blackhole)".to_string(), ThunkRepr::Native(_) => "thunk(native)".to_string(), ThunkRepr::Suspended { lambda, .. } => format!("thunk({:p})", *lambda), diff --git a/snix/eval/src/vm/generators.rs b/snix/eval/src/vm/generators.rs index 4b843bf58..928ddc74e 100644 --- a/snix/eval/src/vm/generators.rs +++ b/snix/eval/src/vm/generators.rs @@ -155,7 +155,7 @@ impl Display for VMRequest { if *import_paths { "" } else { "non_" }, v.type_of() ), - VMRequest::Call(v) => write!(f, "call({})", v), + VMRequest::Call(v) => write!(f, "call({v})"), VMRequest::EnterLambda { lambda, .. 
} => { write!(f, "enter_lambda({:p})", *lambda) } @@ -208,12 +208,12 @@ impl Display for VMResponse { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { match self { VMResponse::Empty => write!(f, "empty"), - VMResponse::Value(v) => write!(f, "value({})", v), + VMResponse::Value(v) => write!(f, "value({v})"), VMResponse::Path(p) => write!(f, "path({})", p.to_string_lossy()), VMResponse::Directory(d) => write!(f, "dir(len = {})", d.len()), VMResponse::Span(_) => write!(f, "span"), VMResponse::Reader(_) => write!(f, "reader"), - VMResponse::FileType(t) => write!(f, "file_type({})", t), + VMResponse::FileType(t) => write!(f, "file_type({t})"), } } } @@ -527,10 +527,7 @@ pub type GenCo = Co; pub async fn request_stack_push(co: &GenCo, val: Value) { match co.yield_(VMRequest::StackPush(val)).await { VMResponse::Empty => {} - msg => panic!( - "Snix bug: VM responded with incorrect generator message: {}", - msg - ), + msg => panic!("Snix bug: VM responded with incorrect generator message: {msg}"), } } @@ -539,10 +536,7 @@ pub async fn request_stack_push(co: &GenCo, val: Value) { pub async fn request_stack_pop(co: &GenCo) -> Value { match co.yield_(VMRequest::StackPop).await { VMResponse::Value(value) => value, - msg => panic!( - "Snix bug: VM responded with incorrect generator message: {}", - msg - ), + msg => panic!("Snix bug: VM responded with incorrect generator message: {msg}"), } } @@ -551,10 +545,7 @@ pub async fn request_force(co: &GenCo, val: Value) -> Value { if let Value::Thunk(_) = val { match co.yield_(VMRequest::ForceValue(val)).await { VMResponse::Value(value) => value, - msg => panic!( - "Snix bug: VM responded with incorrect generator message: {}", - msg - ), + msg => panic!("Snix bug: VM responded with incorrect generator message: {msg}"), } } else { val @@ -566,10 +557,7 @@ pub(crate) async fn request_try_force(co: &GenCo, val: Value) -> Value { if let Value::Thunk(_) = val { match co.yield_(VMRequest::TryForce(val)).await { VMResponse::Value(value) => value, - msg => panic!( - "Snix bug: VM responded with incorrect generator message: {}", - msg - ), + msg => panic!("Snix bug: VM responded with incorrect generator message: {msg}"), } } else { val @@ -582,10 +570,7 @@ pub async fn request_call(co: &GenCo, val: Value) -> Value { let val = request_force(co, val).await; match co.yield_(VMRequest::Call(val)).await { VMResponse::Value(value) => value, - msg => panic!( - "Snix bug: VM responded with incorrect generator message: {}", - msg - ), + msg => panic!("Snix bug: VM responded with incorrect generator message: {msg}"), } } @@ -624,10 +609,7 @@ pub async fn request_string_coerce( VMResponse::Value(value) => Ok(value .to_contextful_str() .expect("coerce_to_string always returns a string")), - msg => panic!( - "Snix bug: VM responded with incorrect generator message: {}", - msg - ), + msg => panic!("Snix bug: VM responded with incorrect generator message: {msg}"), }, } } @@ -636,10 +618,7 @@ pub async fn request_string_coerce( pub async fn request_deep_force(co: &GenCo, val: Value) -> Value { match co.yield_(VMRequest::DeepForceValue(val)).await { VMResponse::Value(value) => value, - msg => panic!( - "Snix bug: VM responded with incorrect generator message: {}", - msg - ), + msg => panic!("Snix bug: VM responded with incorrect generator message: {msg}"), } } @@ -656,10 +635,7 @@ pub(crate) async fn check_equality( { VMResponse::Value(Value::Bool(b)) => Ok(Ok(b)), VMResponse::Value(Value::Catchable(cek)) => Ok(Err(*cek)), - msg => panic!( - "Snix bug: VM responded 
with incorrect generator message: {}", - msg - ), + msg => panic!("Snix bug: VM responded with incorrect generator message: {msg}"), } } @@ -667,10 +643,7 @@ pub(crate) async fn check_equality( pub(crate) async fn emit_warning(co: &GenCo, warning: EvalWarning) { match co.yield_(VMRequest::EmitWarning(warning)).await { VMResponse::Empty => {} - msg => panic!( - "Snix bug: VM responded with incorrect generator message: {}", - msg - ), + msg => panic!("Snix bug: VM responded with incorrect generator message: {msg}"), } } @@ -678,10 +651,7 @@ pub(crate) async fn emit_warning(co: &GenCo, warning: EvalWarning) { pub async fn emit_warning_kind(co: &GenCo, kind: WarningKind) { match co.yield_(VMRequest::EmitWarningKind(kind)).await { VMResponse::Empty => {} - msg => panic!( - "Snix bug: VM responded with incorrect generator message: {}", - msg - ), + msg => panic!("Snix bug: VM responded with incorrect generator message: {msg}"), } } @@ -700,10 +670,7 @@ pub(crate) async fn request_enter_lambda( match co.yield_(msg).await { VMResponse::Value(value) => value, - msg => panic!( - "Snix bug: VM responded with incorrect generator message: {}", - msg - ), + msg => panic!("Snix bug: VM responded with incorrect generator message: {msg}"), } } @@ -712,10 +679,7 @@ pub(crate) async fn request_import_cache_lookup(co: &GenCo, path: PathBuf) -> Op match co.yield_(VMRequest::ImportCacheLookup(path)).await { VMResponse::Value(value) => Some(value), VMResponse::Empty => None, - msg => panic!( - "Snix bug: VM responded with incorrect generator message: {}", - msg - ), + msg => panic!("Snix bug: VM responded with incorrect generator message: {msg}"), } } @@ -723,10 +687,7 @@ pub(crate) async fn request_import_cache_lookup(co: &GenCo, path: PathBuf) -> Op pub(crate) async fn request_import_cache_put(co: &GenCo, path: PathBuf, value: Value) { match co.yield_(VMRequest::ImportCachePut(path, value)).await { VMResponse::Empty => {} - msg => panic!( - "Snix bug: VM responded with incorrect generator message: {}", - msg - ), + msg => panic!("Snix bug: VM responded with incorrect generator message: {msg}"), } } @@ -734,10 +695,7 @@ pub(crate) async fn request_import_cache_put(co: &GenCo, path: PathBuf, value: V pub(crate) async fn request_path_import(co: &GenCo, path: PathBuf) -> PathBuf { match co.yield_(VMRequest::PathImport(path)).await { VMResponse::Path(path) => path, - msg => panic!( - "Snix bug: VM responded with incorrect generator message: {}", - msg - ), + msg => panic!("Snix bug: VM responded with incorrect generator message: {msg}"), } } @@ -745,10 +703,7 @@ pub(crate) async fn request_path_import(co: &GenCo, path: PathBuf) -> PathBuf { pub async fn request_open_file(co: &GenCo, path: PathBuf) -> Box { match co.yield_(VMRequest::OpenFile(path)).await { VMResponse::Reader(value) => value, - msg => panic!( - "Snix bug: VM responded with incorrect generator message: {}", - msg - ), + msg => panic!("Snix bug: VM responded with incorrect generator message: {msg}"), } } @@ -756,10 +711,7 @@ pub async fn request_open_file(co: &GenCo, path: PathBuf) -> Box Value { match co.yield_(VMRequest::PathExists(path)).await { VMResponse::Value(value) => value, - msg => panic!( - "Snix bug: VM responded with incorrect generator message: {}", - msg - ), + msg => panic!("Snix bug: VM responded with incorrect generator message: {msg}"), } } @@ -767,20 +719,14 @@ pub(crate) async fn request_path_exists(co: &GenCo, path: PathBuf) -> Value { pub(crate) async fn request_read_dir(co: &GenCo, path: PathBuf) -> Vec<(bytes::Bytes, 
FileType)> { match co.yield_(VMRequest::ReadDir(path)).await { VMResponse::Directory(dir) => dir, - msg => panic!( - "Snix bug: VM responded with incorrect generator message: {}", - msg - ), + msg => panic!("Snix bug: VM responded with incorrect generator message: {msg}"), } } pub(crate) async fn request_span(co: &GenCo) -> Span { match co.yield_(VMRequest::Span).await { VMResponse::Span(span) => span, - msg => panic!( - "Snix bug: VM responded with incorrect generator message: {}", - msg - ), + msg => panic!("Snix bug: VM responded with incorrect generator message: {msg}"), } } @@ -788,10 +734,7 @@ pub(crate) async fn request_span(co: &GenCo) -> Span { pub(crate) async fn request_read_file_type(co: &GenCo, path: PathBuf) -> FileType { match co.yield_(VMRequest::ReadFileType(path)).await { VMResponse::FileType(file_type) => file_type, - msg => panic!( - "Snix bug: VM responded with incorrect generator message: {}", - msg - ), + msg => panic!("Snix bug: VM responded with incorrect generator message: {msg}"), } } diff --git a/snix/eval/src/vm/mod.rs b/snix/eval/src/vm/mod.rs index babe95f7b..63d3e2156 100644 --- a/snix/eval/src/vm/mod.rs +++ b/snix/eval/src/vm/mod.rs @@ -1234,10 +1234,7 @@ async fn resolve_with( async fn fetch_forced_with(co: &GenCo, idx: usize) -> Value { match co.yield_(VMRequest::WithValue(idx)).await { VMResponse::Value(value) => value, - msg => panic!( - "Snix bug: VM responded with incorrect generator message: {}", - msg - ), + msg => panic!("Snix bug: VM responded with incorrect generator message: {msg}"), } } @@ -1245,10 +1242,7 @@ async fn resolve_with( async fn fetch_captured_with(co: &GenCo, idx: usize) -> Value { match co.yield_(VMRequest::CapturedWithValue(idx)).await { VMResponse::Value(value) => value, - msg => panic!( - "Snix bug: VM responded with incorrect generator message: {}", - msg - ), + msg => panic!("Snix bug: VM responded with incorrect generator message: {msg}"), } } diff --git a/snix/eval/src/warnings.rs b/snix/eval/src/warnings.rs index 6bea1e15a..459d34d20 100644 --- a/snix/eval/src/warnings.rs +++ b/snix/eval/src/warnings.rs @@ -92,7 +92,7 @@ impl EvalWarning { } WarningKind::ShadowedGlobal(name) => { - format!("declared variable '{}' shadows a built-in global!", name) + format!("declared variable '{name}' shadows a built-in global!") } WarningKind::DeprecatedLegacyLet => { @@ -100,11 +100,11 @@ impl EvalWarning { } WarningKind::InvalidNixPath(ref err) => { - format!("invalid NIX_PATH resulted in a parse error: {}", err) + format!("invalid NIX_PATH resulted in a parse error: {err}") } WarningKind::UselessBoolOperation(msg) => { - format!("useless operation on boolean: {}", msg) + format!("useless operation on boolean: {msg}") } WarningKind::DeadCode => "this code will never be executed".to_string(), @@ -113,14 +113,13 @@ impl EvalWarning { WarningKind::EmptyLet => "this `let`-expression contains no bindings".to_string(), - WarningKind::ShadowedOutput(ref out) => format!( - "this derivation's environment shadows the output name {}", - out - ), + WarningKind::ShadowedOutput(ref out) => { + format!("this derivation's environment shadows the output name {out}") + } WarningKind::SRIHashWrongPadding => "SRI hash has wrong padding".to_string(), WarningKind::NotImplemented(what) => { - format!("feature not yet implemented in snix: {}", what) + format!("feature not yet implemented in snix: {what}") } } } diff --git a/snix/glue/src/builtins/derivation.rs b/snix/glue/src/builtins/derivation.rs index 0cdce4fc2..cc2e32a47 100644 --- 
a/snix/glue/src/builtins/derivation.rs +++ b/snix/glue/src/builtins/derivation.rs @@ -43,8 +43,7 @@ fn populate_inputs(drv: &mut Derivation, full_context: NixContext, known_paths: #[cfg(debug_assertions)] assert!( _rest.iter().next().is_none(), - "Extra path not empty for {}", - derivation_str + "Extra path not empty for {derivation_str}" ); match drv.input_derivations.entry(derivation.clone()) { @@ -65,8 +64,7 @@ fn populate_inputs(drv: &mut Derivation, full_context: NixContext, known_paths: #[cfg(debug_assertions)] assert!( _rest.iter().next().is_none(), - "Extra path not empty for {}", - drv_path + "Extra path not empty for {drv_path}" ); // We need to know all the outputs *names* of that derivation. @@ -160,7 +158,7 @@ fn handle_fixed_output( // Peek at hash_str once more. // If it was a SRI hash, but is not using the correct length, this means // the padding was wrong. Emit a warning in that case. - let sri_prefix = format!("{}-", algo); + let sri_prefix = format!("{algo}-"); if let Some(rest) = hash_str.strip_prefix(&sri_prefix) { if data_encoding::BASE64.encode_len(algo.digest_length()) != rest.len() { return Ok(Some(WarningKind::SRIHashWrongPadding)); @@ -475,7 +473,7 @@ pub(crate) mod derivation_builtins { &drv.hash_derivation_modulo(|drv_path| { *known_paths .get_hash_derivation_modulo(&drv_path.to_owned()) - .unwrap_or_else(|| panic!("{} not found", drv_path)) + .unwrap_or_else(|| panic!("{drv_path} not found")) }), ) .map_err(DerivationError::InvalidDerivation)?; diff --git a/snix/glue/src/builtins/import.rs b/snix/glue/src/builtins/import.rs index fcfdd3bc8..ce3fd3243 100644 --- a/snix/glue/src/builtins/import.rs +++ b/snix/glue/src/builtins/import.rs @@ -99,7 +99,7 @@ async fn filtered_ingest( .await .map_err(|e| ErrorKind::IO { path: Some(path.to_path_buf()), - error: Rc::new(std::io::Error::new(std::io::ErrorKind::Other, e)), + error: Rc::new(std::io::Error::other(e)), }) }) } diff --git a/snix/glue/src/builtins/mod.rs b/snix/glue/src/builtins/mod.rs index 6704d5bda..b9b170486 100644 --- a/snix/glue/src/builtins/mod.rs +++ b/snix/glue/src/builtins/mod.rs @@ -117,7 +117,7 @@ mod tests { snix_eval::Value::String(s) => { assert_eq!(*s, "/nix/store/xpcvxsx5sw4rbq666blz6sxqlmsqphmr-foo",); } - _ => panic!("unexpected value type: {:?}", value), + _ => panic!("unexpected value type: {value:?}"), } } @@ -204,7 +204,7 @@ mod tests { snix_eval::Value::String(s) => { assert_eq!(*s, expected_path); } - _ => panic!("unexpected value type: {:?}", value), + _ => panic!("unexpected value type: {value:?}"), } } @@ -236,7 +236,7 @@ mod tests { snix_eval::Value::Bool(v) => { assert!(v); } - _ => panic!("unexpected value type: {:?}", value), + _ => panic!("unexpected value type: {value:?}"), } } @@ -254,7 +254,7 @@ mod tests { snix_eval::Value::Bool(v) => { assert!(v); } - _ => panic!("unexpected value type: {:?}", value), + _ => panic!("unexpected value type: {value:?}"), } } @@ -276,7 +276,7 @@ mod tests { snix_eval::Value::Bool(v) => { assert!(v); } - _ => panic!("unexpected value type: {:?}", value), + _ => panic!("unexpected value type: {value:?}"), } } @@ -297,7 +297,7 @@ mod tests { snix_eval::Value::Bool(v) => { assert!(v); } - _ => panic!("unexpected value type: {:?}", value), + _ => panic!("unexpected value type: {value:?}"), } } @@ -330,7 +330,7 @@ mod tests { assert_eq!(*s, expected_drvpath); } - _ => panic!("unexpected value type: {:?}", value), + _ => panic!("unexpected value type: {value:?}"), }; } @@ -362,7 +362,7 @@ mod tests { snix_eval::Value::String(s) => { assert_eq!(*s, 
expected_path); } - _ => panic!("unexpected value type: {:?}", value), + _ => panic!("unexpected value type: {value:?}"), } assert!( @@ -536,7 +536,7 @@ mod tests { snix_eval::Value::String(s) => { assert_eq!(expected_outpath, s.as_bstr()); } - _ => panic!("unexpected value type: {:?}", value), + _ => panic!("unexpected value type: {value:?}"), } assert!(eval_result.errors.is_empty(), "errors should be empty"); @@ -580,7 +580,7 @@ mod tests { s.as_bstr() ); } - v => panic!("unexpected value type: {:?}", v), + v => panic!("unexpected value type: {v:?}"), } } else { assert!(value.is_none(), "unexpected success on illegal store paths"); @@ -626,7 +626,7 @@ mod tests { s.as_bstr() ); } - v => panic!("unexpected value type: {:?}", v), + v => panic!("unexpected value type: {v:?}"), } } else { assert!(value.is_none(), "unexpected success on illegal store paths"); @@ -714,7 +714,7 @@ mod tests { snix_eval::Value::String(s) => { assert_eq!(expected_outpath, s.as_bstr()); } - _ => panic!("unexpected value type: {:?}", value), + _ => panic!("unexpected value type: {value:?}"), } assert!(eval_result.errors.is_empty(), "errors should be empty"); diff --git a/snix/glue/src/fetchers/mod.rs b/snix/glue/src/fetchers/mod.rs index 15963c52f..816bf876f 100644 --- a/snix/glue/src/fetchers/mod.rs +++ b/snix/glue/src/fetchers/mod.rs @@ -116,7 +116,7 @@ impl std::fmt::Debug for Fetch { NixHash::Sha256(*exp_nar_sha256) ) } else { - write!(f, "Tarball [url: {}, exp_hash: None]", url) + write!(f, "Tarball [url: {url}, exp_hash: None]") } } Fetch::NAR { url, hash } => { diff --git a/snix/glue/src/known_paths.rs b/snix/glue/src/known_paths.rs index 5566d9322..263ff8ba3 100644 --- a/snix/glue/src/known_paths.rs +++ b/snix/glue/src/known_paths.rs @@ -78,7 +78,7 @@ impl KnownPaths { // compute the hash derivation modulo let hash_derivation_modulo = drv.hash_derivation_modulo(|drv_path| { self.get_hash_derivation_modulo(&drv_path.to_owned()) - .unwrap_or_else(|| panic!("{} not found", drv_path)) + .unwrap_or_else(|| panic!("{drv_path} not found")) .to_owned() }); diff --git a/snix/glue/src/lib.rs b/snix/glue/src/lib.rs index 4387a5e0d..5dcd13845 100644 --- a/snix/glue/src/lib.rs +++ b/snix/glue/src/lib.rs @@ -23,7 +23,7 @@ pub fn configure_nix_path<'co, 'ro, 'env, IO>( eval_builder.nix_path( nix_search_path .as_ref() - .map(|p| format!("nix=/__corepkgs__:{}", p)) + .map(|p| format!("nix=/__corepkgs__:{p}")) .or_else(|| Some("nix=/__corepkgs__".to_string())), ) } diff --git a/snix/glue/src/snix_build.rs b/snix/glue/src/snix_build.rs index 2f0442460..1b2bbbc61 100644 --- a/snix/glue/src/snix_build.rs +++ b/snix/glue/src/snix_build.rs @@ -258,7 +258,7 @@ fn handle_pass_as_file( /// The filepath is `/build/.attrs-${nixbase32(sha256(key))`. 
fn calculate_pass_as_file_env(k: &str) -> (String, String) { ( - format!("{}Path", k), + format!("{k}Path"), format!( "/build/.attr-{}", nixbase32::encode(&Sha256::new_with_prefix(k).finalize()) diff --git a/snix/glue/src/snix_store_io.rs b/snix/glue/src/snix_store_io.rs index 05ace205c..549642757 100644 --- a/snix/glue/src/snix_store_io.rs +++ b/snix/glue/src/snix_store_io.rs @@ -223,7 +223,7 @@ impl SnixStoreIO { .as_ref() .do_build(build_request) .await - .map_err(|e| std::io::Error::new(io::ErrorKind::Other, e))?; + .map_err(std::io::Error::other)?; let mut out_path_info: Option = None; @@ -256,8 +256,7 @@ impl SnixStoreIO { all_possible_refs .get(*idx as usize) .map(|it| (*it).clone()) - .ok_or(std::io::Error::new( - std::io::ErrorKind::Other, + .ok_or(std::io::Error::other( "invalid build response", )) }) @@ -289,7 +288,7 @@ impl SnixStoreIO { self.path_info_service .put(path_info.clone()) .await - .map_err(|e| std::io::Error::new(io::ErrorKind::Other, e))?; + .map_err(std::io::Error::other)?; if store_path == &output_path { out_path_info = Some(path_info); @@ -309,7 +308,7 @@ impl SnixStoreIO { Ok( directoryservice::descend_to(&self.directory_service, path_info.node.clone(), sub_path) .await - .map_err(|e| std::io::Error::new(io::ErrorKind::Other, e))? + .map_err(std::io::Error::other)? .map(|node| { path_info.node = node; path_info @@ -361,7 +360,7 @@ impl EvalIO for SnixStoreIO { // This would normally be a io::ErrorKind::IsADirectory (still unstable) Err(io::Error::new( io::ErrorKind::Unsupported, - format!("tried to open directory at {:?}", path), + format!("tried to open directory at {path:?}"), )) } Node::File { digest, .. } => { @@ -558,7 +557,7 @@ mod tests { let value = result.value.expect("must be some"); match value { snix_eval::Value::String(s) => Some(s.to_str_lossy().into_owned()), - _ => panic!("unexpected value type: {:?}", value), + _ => panic!("unexpected value type: {value:?}"), } } @@ -625,7 +624,7 @@ mod tests { snix_eval::Value::String(s) => { assert_eq!(*s, "/deep/thought"); } - _ => panic!("unexpected value type: {:?}", value), + _ => panic!("unexpected value type: {value:?}"), } } } diff --git a/snix/nar-bridge/src/lib.rs b/snix/nar-bridge/src/lib.rs index 5ff171d4f..c2de3d81c 100644 --- a/snix/nar-bridge/src/lib.rs +++ b/snix/nar-bridge/src/lib.rs @@ -83,9 +83,6 @@ async fn four_o_four() -> Result<(), StatusCode> { async fn nix_cache_info(priority: u64) -> impl IntoResponse { ( [("Content-Type", nix_http::MIME_TYPE_CACHE_INFO)], - format!( - "StoreDir: /nix/store\nWantMassQuery: 1\nPriority: {}\n", - priority - ), + format!("StoreDir: /nix/store\nWantMassQuery: 1\nPriority: {priority}\n"), ) } diff --git a/snix/nix-compat-derive/src/internal/attrs.rs b/snix/nix-compat-derive/src/internal/attrs.rs index cad982a59..79e7f5f41 100644 --- a/snix/nix-compat-derive/src/internal/attrs.rs +++ b/snix/nix-compat-derive/src/internal/attrs.rs @@ -51,7 +51,7 @@ impl Field { } } else { let path = meta.path.to_token_stream().to_string(); - return Err(meta.error(format_args!("unknown nix field attribute '{}'", path))); + return Err(meta.error(format_args!("unknown nix field attribute '{path}'"))); } Ok(()) }) { @@ -86,9 +86,7 @@ impl Variant { } } else { let path = meta.path.to_token_stream().to_string(); - return Err( - meta.error(format_args!("unknown nix variant attribute '{}'", path)) - ); + return Err(meta.error(format_args!("unknown nix variant attribute '{path}'"))); } Ok(()) }) { @@ -149,9 +147,7 @@ impl Container { crate_path = parse_lit(ctx, &meta, CRATE)?; } else 
{ let path = meta.path.to_token_stream().to_string(); - return Err( - meta.error(format_args!("unknown nix variant attribute '{}'", path)) - ); + return Err(meta.error(format_args!("unknown nix variant attribute '{path}'"))); } Ok(()) }) { @@ -190,7 +186,7 @@ pub fn get_lit_str( } else { ctx.error_spanned( expr, - format_args!("expected nix attribute {} to be string", attr), + format_args!("expected nix attribute {attr} to be string"), ); Ok(None) } diff --git a/snix/nix-compat/src/bin/drvfmt.rs b/snix/nix-compat/src/bin/drvfmt.rs index fca22c2cb..2f06a137e 100644 --- a/snix/nix-compat/src/bin/drvfmt.rs +++ b/snix/nix-compat/src/bin/drvfmt.rs @@ -42,6 +42,6 @@ fn main() { .expect("unable to serialize") ); } - Err(e) => eprintln!("unable to parse derivation: {:#?}", e), + Err(e) => eprintln!("unable to parse derivation: {e:#?}"), } } diff --git a/snix/nix-compat/src/derivation/mod.rs b/snix/nix-compat/src/derivation/mod.rs index 9ef0ba566..2b8fc1e78 100644 --- a/snix/nix-compat/src/derivation/mod.rs +++ b/snix/nix-compat/src/derivation/mod.rs @@ -133,7 +133,7 @@ impl Derivation { name: &str, ) -> Result, DerivationError> { // append .drv to the name - let name = format!("{}.drv", name); + let name = format!("{name}.drv"); // collect the list of paths from input_sources and input_derivations // into a (sorted, guaranteed by BTreeSet) list of references @@ -362,6 +362,6 @@ fn ca_kind_prefix(ca_hash: &CAHash) -> &'static str { match ca_hash { CAHash::Flat(_) => "", CAHash::Nar(_) => "r:", - _ => panic!("invalid ca hash in derivation context: {:?}", ca_hash), + _ => panic!("invalid ca hash in derivation context: {ca_hash:?}"), } } diff --git a/snix/nix-compat/src/derivation/tests/mod.rs b/snix/nix-compat/src/derivation/tests/mod.rs index e599a8511..fa933bfd4 100644 --- a/snix/nix-compat/src/derivation/tests/mod.rs +++ b/snix/nix-compat/src/derivation/tests/mod.rs @@ -151,7 +151,7 @@ fn from_aterm_bytes_trailer() { )] #[case::unicode("unicode", "52a9id8hx688hvlnz4d1n25ml1jdykz0-unicode.drv")] fn derivation_path(#[case] name: &str, #[case] expected_path: &str) { - let json_bytes = fs::read(format!("{}/ok/{}.json", RESOURCES_PATHS, expected_path)) + let json_bytes = fs::read(format!("{RESOURCES_PATHS}/ok/{expected_path}.json")) .expect("unable to read JSON"); let derivation: Derivation = serde_json::from_slice(&json_bytes).expect("JSON was not well-formatted"); @@ -194,7 +194,7 @@ fn derivation_without_output_paths(derivation: &Derivation) -> Derivation { fn hash_derivation_modulo_fixed(#[case] drv_path: &str, #[case] expected_digest: [u8; 32]) { // read in the fixture let json_bytes = - fs::read(format!("{}/ok/{}.json", RESOURCES_PATHS, drv_path)).expect("unable to read JSON"); + fs::read(format!("{RESOURCES_PATHS}/ok/{drv_path}.json")).expect("unable to read JSON"); let drv: Derivation = serde_json::from_slice(&json_bytes).expect("must deserialize"); let actual = drv.hash_derivation_modulo(|_| panic!("must not be called")); @@ -220,7 +220,7 @@ fn hash_derivation_modulo_fixed(#[case] drv_path: &str, #[case] expected_digest: fn output_paths(#[case] name: &str, #[case] drv_path_str: &str) { // read in the derivation let expected_derivation = Derivation::from_aterm_bytes( - &fs::read(format!("{}/ok/{}", RESOURCES_PATHS, drv_path_str)).expect("unable to read .drv"), + &fs::read(format!("{RESOURCES_PATHS}/ok/{drv_path_str}")).expect("unable to read .drv"), ) .expect("must succeed"); @@ -410,7 +410,7 @@ fn output_path_construction() { "foo", &foo_drv.hash_derivation_modulo(|drv_path| { if 
drv_path.to_string() != "0hm2f1psjpcwg8fijsmr4wwxrx59s092-bar.drv" { - panic!("lookup called with unexpected drv_path: {}", drv_path); + panic!("lookup called with unexpected drv_path: {drv_path}"); } bar_drv_hash_derivation_modulo }), diff --git a/snix/nix-compat/src/flakeref/mod.rs b/snix/nix-compat/src/flakeref/mod.rs index 65d9a62cc..ae51db51e 100644 --- a/snix/nix-compat/src/flakeref/mod.rs +++ b/snix/nix-compat/src/flakeref/mod.rs @@ -590,7 +590,7 @@ impl FlakeRef { _ => unreachable!(), }; - let mut url = Url::parse(&format!("{}://{}/{}", scheme, owner, repo)).unwrap(); + let mut url = Url::parse(&format!("{scheme}://{owner}/{repo}")).unwrap(); if let Some(h) = host { url.set_host(Some(h)).unwrap(); } @@ -609,7 +609,7 @@ impl FlakeRef { url } FlakeRef::Indirect { id, r#ref, rev } => { - let mut url = Url::parse(&format!("indirect://{}", id)).unwrap(); + let mut url = Url::parse(&format!("indirect://{id}")).unwrap(); append_params(&mut url, &[("ref", r#ref.clone()), ("rev", rev.clone())]); url } diff --git a/snix/nix-compat/src/nar/reader/async/test.rs b/snix/nix-compat/src/nar/reader/async/test.rs index 7bc1f8942..8cbed960a 100644 --- a/snix/nix-compat/src/nar/reader/async/test.rs +++ b/snix/nix-compat/src/nar/reader/async/test.rs @@ -290,7 +290,7 @@ async fn must_read_file(name: &'static str, entry: nar::reader::Entry<'_, '_>) { assert!(!executable); assert_eq!(reader.read(&mut [0]).await.unwrap(), 0); } - _ => panic!("unexpected type for {}", name), + _ => panic!("unexpected type for {name}"), } } @@ -305,6 +305,6 @@ fn must_be_symlink( nar::reader::Node::Symlink { target } => { assert_eq!(exp_target.as_bytes(), &target); } - _ => panic!("unexpected type for {}", name), + _ => panic!("unexpected type for {name}"), } } diff --git a/snix/nix-compat/src/nar/reader/test.rs b/snix/nix-compat/src/nar/reader/test.rs index 63e4fb289..53d34c40d 100644 --- a/snix/nix-compat/src/nar/reader/test.rs +++ b/snix/nix-compat/src/nar/reader/test.rs @@ -258,7 +258,7 @@ fn must_read_file(name: &'static str, entry: nar::reader::Entry<'_, '_>) { assert!(!executable); assert_eq!(reader.read(&mut [0]).unwrap(), 0); } - _ => panic!("unexpected type for {}", name), + _ => panic!("unexpected type for {name}"), } } @@ -273,6 +273,6 @@ fn must_be_symlink( nar::reader::Node::Symlink { target } => { assert_eq!(exp_target.as_bytes(), &target); } - _ => panic!("unexpected type for {}", name), + _ => panic!("unexpected type for {name}"), } } diff --git a/snix/nix-compat/src/nix_daemon/handler.rs b/snix/nix-compat/src/nix_daemon/handler.rs index 5e88d3f04..f14542280 100644 --- a/snix/nix-compat/src/nix_daemon/handler.rs +++ b/snix/nix-compat/src/nix_daemon/handler.rs @@ -357,7 +357,7 @@ mod tests { } Err(e) => { w.write_value(&STDERR_ERROR).await.unwrap(); - w.write_value(&NixError::new(format!("{:?}", e))) + w.write_value(&NixError::new(format!("{e:?}"))) .await .unwrap(); } diff --git a/snix/nix-compat/src/nix_daemon/types.rs b/snix/nix-compat/src/nix_daemon/types.rs index f4505a616..4fcdb2129 100644 --- a/snix/nix-compat/src/nix_daemon/types.rs +++ b/snix/nix-compat/src/nix_daemon/types.rs @@ -118,7 +118,7 @@ impl NixDeserialize for CAHash { let value: Option = reader.try_read_value().await?; match value { Some(value) => Ok(Some(CAHash::from_nix_hex_str(&value).ok_or_else(|| { - R::Error::invalid_data(format!("Invalid cahash {}", value)) + R::Error::invalid_data(format!("Invalid cahash {value}")) })?)), None => Ok(None), } @@ -137,7 +137,7 @@ impl NixDeserialize for Option { Ok(None) } else { 
Ok(Some(Some(CAHash::from_nix_hex_str(&value).ok_or_else( - || R::Error::invalid_data(format!("Invalid cahash {}", value)), + || R::Error::invalid_data(format!("Invalid cahash {value}")), )?))) } } diff --git a/snix/nix-compat/src/nix_daemon/worker_protocol.rs b/snix/nix-compat/src/nix_daemon/worker_protocol.rs index 686690f62..b30d6c007 100644 --- a/snix/nix-compat/src/nix_daemon/worker_protocol.rs +++ b/snix/nix-compat/src/nix_daemon/worker_protocol.rs @@ -168,7 +168,7 @@ where if worker_magic_1 != WORKER_MAGIC_1 { Err(std::io::Error::new( ErrorKind::InvalidData, - format!("Incorrect worker magic number received: {}", worker_magic_1), + format!("Incorrect worker magic number received: {worker_magic_1}"), )) } else { conn.write_u64_le(WORKER_MAGIC_2).await?; @@ -182,7 +182,7 @@ where if client_version < ProtocolVersion::from_parts(1, 10) { return Err(Error::new( ErrorKind::Unsupported, - format!("The nix client version {} is too old", client_version), + format!("The nix client version {client_version} is too old"), )); } let picked_version = min(PROTOCOL_VERSION, client_version); @@ -215,7 +215,7 @@ pub async fn read_op(r: &mut R) -> std::io::Result String { - format!("{}{}", STORE_DIR_WITH_SLASH, self) + format!("{STORE_DIR_WITH_SLASH}{self}") } } diff --git a/snix/nix-compat/src/store_path/utils.rs b/snix/nix-compat/src/store_path/utils.rs index 0046e2703..5aabecdd0 100644 --- a/snix/nix-compat/src/store_path/utils.rs +++ b/snix/nix-compat/src/store_path/utils.rs @@ -203,7 +203,7 @@ fn make_references_string, I: IntoIterator>( /// The actual placeholder is basically just a SHA256 hash encoded in /// cppnix format. pub fn hash_placeholder(name: &str) -> String { - let digest = Sha256::new_with_prefix(format!("nix-output:{}", name)).finalize(); + let digest = Sha256::new_with_prefix(format!("nix-output:{name}")).finalize(); format!("/{}", nixbase32::encode(&digest)) } diff --git a/snix/nix-compat/src/wire/bytes/reader/mod.rs b/snix/nix-compat/src/wire/bytes/reader/mod.rs index ae13bff44..2fdc28aec 100644 --- a/snix/nix-compat/src/wire/bytes/reader/mod.rs +++ b/snix/nix-compat/src/wire/bytes/reader/mod.rs @@ -554,7 +554,7 @@ mod tests { let payload = &hex!("FF0102030405060708"); let mut mock = Builder::new() .read(&produce_packet_bytes(payload).await[..offset]) - .read_error(std::io::Error::new(std::io::ErrorKind::Other, "foo")) + .read_error(std::io::Error::other("foo")) .build(); // Either length reading or data reading can fail, depending on which test case we're in. 
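For context: std::io::Error::other, stabilized in Rust 1.74, constructs an error whose kind() is ErrorKind::Other, so the mechanical io::Error::new(io::ErrorKind::Other, ...) replacements in the surrounding hunks are behavior-preserving. A minimal standalone sketch of that equivalence (illustrative only, not part of the patch; the msg parameter is hypothetical):

    use std::io;

    // Older spelling: explicitly name ErrorKind::Other.
    fn old_style(msg: &str) -> io::Error {
        io::Error::new(io::ErrorKind::Other, msg.to_string())
    }

    // Newer spelling: io::Error::other accepts anything convertible
    // into Box<dyn std::error::Error + Send + Sync>.
    fn new_style(msg: &str) -> io::Error {
        io::Error::other(msg.to_string())
    }

    fn main() {
        // Both constructors yield ErrorKind::Other with the same message.
        assert_eq!(old_style("foo").kind(), new_style("foo").kind());
        assert_eq!(new_style("foo").to_string(), "foo");
    }
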
@@ -595,7 +595,7 @@ mod tests { let payload = &hex!("FF0102030405060708"); let mock = Builder::new() .read(&produce_packet_bytes(payload).await[..offset]) - .read_error(std::io::Error::new(std::io::ErrorKind::Other, "foo")) + .read_error(std::io::Error::other("foo")) .build(); let mut mock = BufReader::new(mock); @@ -631,7 +631,7 @@ mod tests { let payload = &hex!("FF0102030405060708"); let mut mock = Builder::new() .read(&produce_packet_bytes(payload).await) - .read_error(std::io::Error::new(std::io::ErrorKind::Other, "foo")) + .read_error(std::io::Error::other("foo")) .build(); let mut r = BytesReader::new(&mut mock, ..MAX_LEN).await.unwrap(); @@ -648,7 +648,7 @@ mod tests { let payload = &hex!("FF0102030405060708"); let mock = Builder::new() .read(&produce_packet_bytes(payload).await) - .read_error(std::io::Error::new(std::io::ErrorKind::Other, "foo")) + .read_error(std::io::Error::other("foo")) .build(); let mut mock = BufReader::new(mock); diff --git a/snix/nix-compat/src/wire/bytes/writer.rs b/snix/nix-compat/src/wire/bytes/writer.rs index 7fc869a59..cb1356123 100644 --- a/snix/nix-compat/src/wire/bytes/writer.rs +++ b/snix/nix-compat/src/wire/bytes/writer.rs @@ -477,7 +477,7 @@ mod tests { let mut mock = Builder::new() .write(&1u32.to_le_bytes()) - .write_error(std::io::Error::new(std::io::ErrorKind::Other, "🍿")) + .write_error(std::io::Error::other("🍿")) .build(); let mut w = BytesWriter::new(&mut mock, payload.len() as u64); @@ -492,7 +492,7 @@ mod tests { let mut mock = Builder::new() .write(&1u32.to_le_bytes()) - .write_error(std::io::Error::new(std::io::ErrorKind::Other, "🍿")) + .write_error(std::io::Error::other("🍿")) .build(); let mut w = BytesWriter::new(&mut mock, payload.len() as u64); @@ -508,7 +508,7 @@ mod tests { let mut mock = Builder::new() .write(&2u64.to_le_bytes()) .write(&hex!("f0")) - .write_error(std::io::Error::new(std::io::ErrorKind::Other, "🍿")) + .write_error(std::io::Error::other("🍿")) .build(); let mut w = BytesWriter::new(&mut mock, payload.len() as u64); @@ -526,7 +526,7 @@ mod tests { .write(&1u64.to_le_bytes()) .write(&hex!("f0")) .write(&hex!("00")) - .write_error(std::io::Error::new(std::io::ErrorKind::Other, "🍿")) + .write_error(std::io::Error::other("🍿")) .build(); let mut w = BytesWriter::new(&mut mock, payload.len() as u64); diff --git a/snix/nix-compat/src/wire/de/mod.rs b/snix/nix-compat/src/wire/de/mod.rs index f85ccd8fe..26ae0164c 100644 --- a/snix/nix-compat/src/wire/de/mod.rs +++ b/snix/nix-compat/src/wire/de/mod.rs @@ -24,7 +24,7 @@ pub trait Error: Sized + StdError { /// Some kind of std::io::Error occured. fn io_error(err: std::io::Error) -> Self { - Self::custom(format_args!("There was an I/O error {}", err)) + Self::custom(format_args!("There was an I/O error {err}")) } /// The data read from `NixRead` is invalid. 
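The format! and format_args! rewrites here and throughout the patch rely on inline format-argument capture (stable since Rust 1.58): a bare identifier inside {} is captured from the surrounding scope, so format!("... {e}") renders the same as format!("... {}", e); clippy's uninlined_format_args lint flags the older form. Only plain identifiers can be captured this way; expressions such as field accesses or method calls still need a positional argument. A small illustrative sketch (not taken from the patch):

    fn main() {
        let err = std::io::Error::other("boom");

        // Positional argument and inline capture render identically.
        let positional = format!("There was an I/O error {}", err);
        let captured = format!("There was an I/O error {err}");
        assert_eq!(positional, captured);

        // Expressions (e.g. tuple field access) still need an explicit
        // argument; "{parts.0}" would not compile.
        let parts = ("key", "value");
        println!("{}={}", parts.0, parts.1);
    }
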
@@ -41,7 +41,7 @@ pub trait Error: Sized + StdError { impl Error for io::Error { fn custom(msg: T) -> Self { - io::Error::new(io::ErrorKind::Other, msg.to_string()) + io::Error::other(msg.to_string()) } fn io_error(err: std::io::Error) -> Self { diff --git a/snix/nix-compat/src/wire/ser/mock.rs b/snix/nix-compat/src/wire/ser/mock.rs index 15bcea856..2a1f15275 100644 --- a/snix/nix-compat/src/wire/ser/mock.rs +++ b/snix/nix-compat/src/wire/ser/mock.rs @@ -215,7 +215,7 @@ impl Builder { Operation::WriteDisplay(_, Err(Error::WrongWrite(op, OperationType::WriteDisplay))) => { self.write_operation_type(*op) } - s => panic!("Invalid operation {:?}", s), + s => panic!("Invalid operation {s:?}"), } } diff --git a/snix/nix-compat/src/wire/ser/mod.rs b/snix/nix-compat/src/wire/ser/mod.rs index ef3c6e2e3..db54ad798 100644 --- a/snix/nix-compat/src/wire/ser/mod.rs +++ b/snix/nix-compat/src/wire/ser/mod.rs @@ -19,7 +19,7 @@ pub trait Error: Sized + StdError { fn custom(msg: T) -> Self; fn io_error(err: std::io::Error) -> Self { - Self::custom(format_args!("There was an I/O error {}", err)) + Self::custom(format_args!("There was an I/O error {err}")) } fn unsupported_data(msg: T) -> Self { @@ -33,7 +33,7 @@ pub trait Error: Sized + StdError { impl Error for io::Error { fn custom(msg: T) -> Self { - io::Error::new(io::ErrorKind::Other, msg.to_string()) + io::Error::other(msg.to_string()) } fn io_error(err: std::io::Error) -> Self { diff --git a/snix/nix-compat/src/wire/ser/writer.rs b/snix/nix-compat/src/wire/ser/writer.rs index 0e5704ba2..692088262 100644 --- a/snix/nix-compat/src/wire/ser/writer.rs +++ b/snix/nix-compat/src/wire/ser/writer.rs @@ -203,7 +203,7 @@ where } let offset = self.buf.len(); self.buf.put_u64_le(0); - if let Err(err) = write!(self.buf, "{}", msg) { + if let Err(err) = write!(self.buf, "{msg}") { self.buf.truncate(offset); return Err(Self::Error::unsupported_data(err)); } diff --git a/snix/serde/examples/cfg-demo.rs b/snix/serde/examples/cfg-demo.rs index ae98cf2b0..d621eb8e7 100644 --- a/snix/serde/examples/cfg-demo.rs +++ b/snix/serde/examples/cfg-demo.rs @@ -31,5 +31,5 @@ fn main() { // Now you can use snix_serde to deserialise the struct: let foods: Data = snix_serde::from_str(code).expect("deserialisation should succeed"); - println!("These are the foods:\n{:#?}", foods); + println!("These are the foods:\n{foods:#?}"); } diff --git a/snix/serde/examples/nixpkgs.rs b/snix/serde/examples/nixpkgs.rs index 2126fd0f4..ed20c1c8c 100644 --- a/snix/serde/examples/nixpkgs.rs +++ b/snix/serde/examples/nixpkgs.rs @@ -29,6 +29,6 @@ fn main() { match result { Ok(cfg) => println!("Config says: {}:{}", cfg.host, cfg.port), - Err(e) => eprintln!("{:?} / {}", e, e), + Err(e) => eprintln!("{e:?} / {e}"), } } diff --git a/snix/serde/src/error.rs b/snix/serde/src/error.rs index 1b2733ea0..18c793537 100644 --- a/snix/serde/src/error.rs +++ b/snix/serde/src/error.rs @@ -41,16 +41,15 @@ impl Display for Error { match self { Error::Unserializable { value_type } => write!( f, - "can not deserialise a Nix '{}' into a Rust type", - value_type + "can not deserialise a Nix '{value_type}' into a Rust type" ), Error::Unsupported { wanted } => { - write!(f, "can not deserialize a '{}' from a Nix value", wanted) + write!(f, "can not deserialize a '{wanted}' from a Nix value") } Error::UnexpectedType { expected, got } => { - write!(f, "expected type {}, but got Nix type {}", expected, got) + write!(f, "expected type {expected}, but got Nix type {got}") } Error::NixErrors { errors } => { @@ -67,10 +66,10 @@ 
impl Display for Error { Ok(()) } - Error::Deserialization(err) => write!(f, "deserialisation error occured: {}", err), + Error::Deserialization(err) => write!(f, "deserialisation error occured: {err}"), Error::IntegerConversion { got, need } => { - write!(f, "i64({}) does not fit in a {}", got, need) + write!(f, "i64({got}) does not fit in a {need}") } Error::AmbiguousEnum => write!(f, "could not determine enum variant: ambiguous keys"), diff --git a/snix/store/src/bin/snix-store.rs b/snix/store/src/bin/snix-store.rs index 88c99735b..3642fa8a3 100644 --- a/snix/store/src/bin/snix-store.rs +++ b/snix/store/src/bin/snix-store.rs @@ -287,7 +287,7 @@ async fn run_cli( async move { let span = Span::current(); span.pb_set_style(&snix_tracing::PB_SPINNER_STYLE); - span.pb_set_message(&format!("Ingesting {:?}", path)); + span.pb_set_message(&format!("Ingesting {path:?}")); span.pb_start(); // Ingest the contents at the given path into castore. @@ -300,7 +300,7 @@ async fn run_cli( .await .map_err(std::io::Error::custom)?; - span.pb_set_message(&format!("NAR Calculation for {:?}", path)); + span.pb_set_message(&format!("NAR Calculation for {path:?}")); // Ask for the NAR size and sha256 let (nar_size, nar_sha256) = diff --git a/snix/store/src/import.rs b/snix/store/src/import.rs index f553af2d7..1e9f5cb2a 100644 --- a/snix/store/src/import.rs +++ b/snix/store/src/import.rs @@ -94,7 +94,7 @@ where let root_node = ingest_path::<_, _, _, &[u8]>(blob_service, directory_service, path.as_ref(), None) .await - .map_err(|e| std::io::Error::new(std::io::ErrorKind::Other, e))?; + .map_err(std::io::Error::other)?; // Ask for the NAR size and sha256 let (nar_size, nar_sha256) = nar_calculation_service.calculate_nar(&root_node).await?; @@ -108,7 +108,7 @@ where store_path::build_ca_path(name, &ca, std::iter::empty::<&str>(), false).map_err(|_| { std::io::Error::new( std::io::ErrorKind::InvalidData, - format!("invalid name: {}", name), + format!("invalid name: {name}"), ) })?; diff --git a/snix/store/src/nar/renderer.rs b/snix/store/src/nar/renderer.rs index c2ffea459..957d652dc 100644 --- a/snix/store/src/nar/renderer.rs +++ b/snix/store/src/nar/renderer.rs @@ -39,7 +39,7 @@ where self.directory_service.clone(), ) .await - .map_err(|e| snix_castore::Error::StorageError(format!("failed rendering nar: {}", e))) + .map_err(|e| snix_castore::Error::StorageError(format!("failed rendering nar: {e}"))) } } diff --git a/snix/store/src/nar/seekable.rs b/snix/store/src/nar/seekable.rs index b4453bddb..70fcc5cda 100644 --- a/snix/store/src/nar/seekable.rs +++ b/snix/store/src/nar/seekable.rs @@ -245,7 +245,7 @@ impl tokio::io::AsyncSeek for Reader { let this = &mut *self; if this.seeking { - return Err(io::Error::new(io::ErrorKind::Other, "Already seeking")); + return Err(io::Error::other("Already seeking")); } this.seeking = true; diff --git a/snix/store/src/pathinfoservice/bigtable.rs b/snix/store/src/pathinfoservice/bigtable.rs index 5442adccf..cd6619be1 100644 --- a/snix/store/src/pathinfoservice/bigtable.rs +++ b/snix/store/src/pathinfoservice/bigtable.rs @@ -200,7 +200,7 @@ impl PathInfoService for BigtablePathInfoService { let mut response = client .read_rows(request) .await - .map_err(|e| Error::StorageError(format!("unable to read rows: {}", e)))?; + .map_err(|e| Error::StorageError(format!("unable to read rows: {e}")))?; if response.len() != 1 { if response.len() > 1 { @@ -241,7 +241,7 @@ impl PathInfoService for BigtablePathInfoService { // Try to parse the value into a PathInfo message let path_info_proto = 
proto::PathInfo::decode(Bytes::from(cell.value)) - .map_err(|e| Error::StorageError(format!("unable to decode pathinfo proto: {}", e)))?; + .map_err(|e| Error::StorageError(format!("unable to decode pathinfo proto: {e}")))?; let path_info = PathInfo::try_from(path_info_proto) .map_err(|e| Error::StorageError(format!("Invalid path info: {e}")))?; @@ -294,7 +294,7 @@ impl PathInfoService for BigtablePathInfoService { ], }) .await - .map_err(|e| Error::StorageError(format!("unable to mutate rows: {}", e)))?; + .map_err(|e| Error::StorageError(format!("unable to mutate rows: {e}")))?; if resp.predicate_matched { trace!("already existed") @@ -321,12 +321,12 @@ impl PathInfoService for BigtablePathInfoService { let mut rows = client .stream_rows(request) .await - .map_err(|e| Error::StorageError(format!("unable to read rows: {}", e)))?.enumerate(); + .map_err(|e| Error::StorageError(format!("unable to read rows: {e}")))?.enumerate(); use futures::stream::StreamExt; while let Some((i, elem)) = rows.next().await { - let (row_key, mut cells) = elem.map_err(|e| Error::StorageError(format!("unable to stream row {}: {}", i, e)))?; + let (row_key, mut cells) = elem.map_err(|e| Error::StorageError(format!("unable to stream row {i}: {e}")))?; let span = Span::current(); span.record("row.key", bstr::BStr::new(&row_key).to_string()); @@ -351,7 +351,7 @@ impl PathInfoService for BigtablePathInfoService { // Try to parse the value into a PathInfo message. let path_info_proto = proto::PathInfo::decode(Bytes::from(cell.value)) - .map_err(|e| Error::StorageError(format!("unable to decode pathinfo proto: {}", e)))?; + .map_err(|e| Error::StorageError(format!("unable to decode pathinfo proto: {e}")))?; let path_info = PathInfo::try_from(path_info_proto).map_err(|e| Error::StorageError(format!("Invalid path info: {e}")))?; @@ -449,7 +449,7 @@ impl TryFrom for BigtableParameters { .append_pair("instance_name", &instance_name); let params: BigtableParameters = serde_qs::from_str(url.query().unwrap_or_default()) - .map_err(|e| Error::InvalidRequest(format!("failed to parse parameters: {}", e)))?; + .map_err(|e| Error::InvalidRequest(format!("failed to parse parameters: {e}")))?; Ok(params) } diff --git a/snix/store/src/pathinfoservice/from_addr.rs b/snix/store/src/pathinfoservice/from_addr.rs index 98b31ce5c..26783d7b2 100644 --- a/snix/store/src/pathinfoservice/from_addr.rs +++ b/snix/store/src/pathinfoservice/from_addr.rs @@ -36,7 +36,7 @@ pub async fn from_addr( ) -> Result, Box> { #[allow(unused_mut)] let mut url = - Url::parse(uri).map_err(|e| Error::StorageError(format!("unable to parse url: {}", e)))?; + Url::parse(uri).map_err(|e| Error::StorageError(format!("unable to parse url: {e}")))?; let path_info_service_config = with_registry(®, || { >>>::try_from( diff --git a/snix/store/src/pathinfoservice/nix_http.rs b/snix/store/src/pathinfoservice/nix_http.rs index f8cf71ce1..d8f90bc43 100644 --- a/snix/store/src/pathinfoservice/nix_http.rs +++ b/snix/store/src/pathinfoservice/nix_http.rs @@ -212,7 +212,7 @@ where &narinfo.ca, ) .await - .map_err(|e| io::Error::new(io::ErrorKind::Other, e))?; + .map_err(io::Error::other)?; // ensure the ingested narhash and narsize do actually match. 
if narinfo.nar_size != nar_size { diff --git a/snix/store/src/tests/nar_renderer.rs b/snix/store/src/tests/nar_renderer.rs index bb4e088ed..d1e2ee589 100644 --- a/snix/store/src/tests/nar_renderer.rs +++ b/snix/store/src/tests/nar_renderer.rs @@ -31,7 +31,7 @@ async fn single_file_missing_blob( crate::nar::RenderError::NARWriterError(e) => { assert_eq!(io::ErrorKind::NotFound, e.kind()); } - _ => panic!("unexpected error: {:?}", e), + _ => panic!("unexpected error: {e:?}"), } } @@ -60,10 +60,10 @@ async fn seekable( (Ok(_), Err(_)) => panic!("creating reader should have failed but succeeded"), (Ok(_), Ok(Err(_))) => panic!("creating reader should have failed but succeeded"), (Err(err), Ok(Ok(_))) => { - panic!("creating reader should have succeeded but failed: {}", err) + panic!("creating reader should have succeeded but failed: {err}") } (Err(reader_err), Err(expected_err)) => { - assert_eq!(format!("{}", reader_err), format!("{}", expected_err)); + assert_eq!(format!("{reader_err}"), format!("{}", expected_err)); } (Err(reader_err), Ok(Err(expected_err))) => { let crate::nar::RenderError::NARWriterError(e) = reader_err else { diff --git a/snix/store/src/tests/nar_renderer_seekable.rs b/snix/store/src/tests/nar_renderer_seekable.rs index 989a8e90a..4fab6c89e 100644 --- a/snix/store/src/tests/nar_renderer_seekable.rs +++ b/snix/store/src/tests/nar_renderer_seekable.rs @@ -30,9 +30,9 @@ async fn read_to_end( match (reader_result, test_output) { (Ok(_), Err(_)) => panic!("creating reader should have failed but succeeded"), - (Err(err), Ok(_)) => panic!("creating reader should have succeeded but failed: {}", err), + (Err(err), Ok(_)) => panic!("creating reader should have succeeded but failed: {err}"), (Err(reader_err), Err(expected_err)) => { - assert_eq!(format!("{}", reader_err), format!("{}", expected_err)); + assert_eq!(format!("{reader_err}"), format!("{}", expected_err)); } (Ok(mut reader), Ok(expected_read_result)) => { let mut buf: Vec = vec![]; @@ -41,7 +41,7 @@ async fn read_to_end( match (read_result, expected_read_result) { (Ok(_), Err(_)) => panic!("read_to_end should have failed but succeeded"), (Err(err), Ok(_)) => { - panic!("read_to_end should have succeeded but failed: {}", err) + panic!("read_to_end should have succeeded but failed: {err}") } (Err(read_err), Err(expected_read_err)) => { assert_eq!(read_err.kind(), expected_read_err);