chore(snix/castore): edition 2021 -> 2024

Change-Id: I7c5998f720e3c8216a93e7274a78e5352aeffbca
Reviewed-on: https://cl.snix.dev/c/snix/+/30135
Reviewed-by: edef . <edef@edef.eu>
Reviewed-by: Florian Klink <flokli@flokli.de>
Tested-by: besadii
Autosubmit: Ilan Joselevich <personal@ilanjoselevich.com>
This commit is contained in:
Ilan Joselevich 2025-03-19 00:32:46 +00:00 committed by clbot
parent 1621ae21e7
commit fd9c9572e9
12 changed files with 15 additions and 13 deletions

View file

@@ -13574,7 +13574,7 @@ rec {
"snix-castore" = rec { "snix-castore" = rec {
crateName = "snix-castore"; crateName = "snix-castore";
version = "0.1.0"; version = "0.1.0";
edition = "2021"; edition = "2024";
src = lib.cleanSourceWith { filter = sourceFilter; src = ./castore; }; src = lib.cleanSourceWith { filter = sourceFilter; src = ./castore; };
libName = "snix_castore"; libName = "snix_castore";
dependencies = [ dependencies = [

View file

@@ -1,7 +1,7 @@
[package] [package]
name = "snix-castore" name = "snix-castore"
version = "0.1.0" version = "0.1.0"
edition = "2021" edition = "2024"
[dependencies] [dependencies]
async-compression = { workspace = true, features = ["tokio", "zstd"] } async-compression = { workspace = true, features = ["tokio", "zstd"] }

View file

@@ -284,7 +284,7 @@ impl<W: tokio::io::AsyncWrite + Unpin> tokio::io::AsyncWrite for GRPCBlobWriter<
io::ErrorKind::NotConnected, io::ErrorKind::NotConnected,
"already closed", "already closed",
))), ))),
Some((_, ref mut writer)) => { Some((_, writer)) => {
let pinned_writer = pin!(writer); let pinned_writer = pin!(writer);
pinned_writer.poll_write(cx, buf) pinned_writer.poll_write(cx, buf)
} }
@@ -300,7 +300,7 @@ impl<W: tokio::io::AsyncWrite + Unpin> tokio::io::AsyncWrite for GRPCBlobWriter<
io::ErrorKind::NotConnected, io::ErrorKind::NotConnected,
"already closed", "already closed",
))), ))),
Some((_, ref mut writer)) => { Some((_, writer)) => {
let pinned_writer = pin!(writer); let pinned_writer = pin!(writer);
pinned_writer.poll_flush(cx) pinned_writer.poll_flush(cx)
} }

View file

@@ -99,7 +99,7 @@ impl tokio::io::AsyncWrite for MemoryBlobWriter {
io::ErrorKind::NotConnected, io::ErrorKind::NotConnected,
"already closed", "already closed",
)), )),
Some((ref mut buf, ref mut hasher)) => { Some((buf, hasher)) => {
let bytes_written = buf.write(b)?; let bytes_written = buf.write(b)?;
hasher.write(&b[..bytes_written]) hasher.write(&b[..bytes_written])
} }

View file

@@ -567,7 +567,7 @@ where
None => { None => {
// called a second time, return self.fut_output. // called a second time, return self.fut_output.
match self.fut_output.as_ref().unwrap() { match self.fut_output.as_ref().unwrap() {
Ok(ref b3_digest) => Ok(b3_digest.clone()), Ok(b3_digest) => Ok(b3_digest.clone()),
Err(e) => Err(std::io::Error::new(e.kind(), e.to_string())), Err(e) => Err(std::io::Error::new(e.kind(), e.to_string())),
} }
} }

View file

@@ -129,7 +129,7 @@ impl DirectoryService for ObjectStoreDirectoryService {
let stream = match object_store.get(&dir_path).await { let stream = match object_store.get(&dir_path).await {
Ok(v) => v.into_stream(), Ok(v) => v.into_stream(),
Err(object_store::Error::NotFound { .. }) => { Err(object_store::Error::NotFound { .. }) => {
return Ok(Either::Left(futures::stream::empty())) return Ok(Either::Left(futures::stream::empty()));
} }
Err(e) => return Err(std::io::Error::from(e).into()), Err(e) => return Err(std::io::Error::from(e).into()),
}; };

View file

@@ -46,7 +46,7 @@ impl InodeData {
InodeData::Regular(_, size, _) => *size, InodeData::Regular(_, size, _) => *size,
InodeData::Symlink(target) => target.len() as u64, InodeData::Symlink(target) => target.len() as u64,
InodeData::Directory(DirectoryInodeData::Sparse(_, size)) => *size, InodeData::Directory(DirectoryInodeData::Sparse(_, size)) => *size,
InodeData::Directory(DirectoryInodeData::Populated(_, ref children)) => { InodeData::Directory(DirectoryInodeData::Populated(_, children)) => {
children.len() as u64 children.len() as u64
} }
}, },

View file

@@ -89,7 +89,7 @@ where
size: directory_size, size: directory_size,
} }
} }
IngestionEntry::Symlink { ref target, .. } => Node::Symlink { &mut IngestionEntry::Symlink { ref target, .. } => Node::Symlink {
target: bytes::Bytes::copy_from_slice(target).try_into().map_err( target: bytes::Bytes::copy_from_slice(target).try_into().map_err(
|e: SymlinkTargetError| { |e: SymlinkTargetError| {
IngestionError::UploadDirectoryError( IngestionError::UploadDirectoryError(

View file

@@ -115,7 +115,7 @@ fn check_insert_node(
e.insert(node); e.insert(node);
} }
btree_map::Entry::Occupied(occupied) => { btree_map::Entry::Occupied(occupied) => {
return Err(DirectoryError::DuplicateName(occupied.key().to_owned())) return Err(DirectoryError::DuplicateName(occupied.key().to_owned()));
} }
} }

View file

@@ -278,7 +278,7 @@ impl PathBuf {
return Err(std::io::Error::new( return Err(std::io::Error::new(
std::io::ErrorKind::InvalidData, std::io::ErrorKind::InvalidData,
"found disallowed prefix or rootdir", "found disallowed prefix or rootdir",
)) ));
} }
std::path::Component::CurDir => continue, // ignore std::path::Component::CurDir => continue, // ignore
std::path::Component::ParentDir => { std::path::Component::ParentDir => {

View file

@@ -70,7 +70,9 @@ unsafe impl<const N: usize> bytes::BufMut for BytesMutWithDefaultCapacity<N> {
} }
unsafe fn advance_mut(&mut self, cnt: usize) { unsafe fn advance_mut(&mut self, cnt: usize) {
self.inner.advance_mut(cnt); unsafe {
self.inner.advance_mut(cnt);
}
} }
fn chunk_mut(&mut self) -> &mut bytes::buf::UninitSlice { fn chunk_mut(&mut self) -> &mut bytes::buf::UninitSlice {

View file

@@ -36,7 +36,7 @@ where
.ok_or_else(|| Status::invalid_argument("invalid by_what"))?; .ok_or_else(|| Status::invalid_argument("invalid by_what"))?;
match by_what { match by_what {
proto::get_directory_request::ByWhat::Digest(ref digest) => { proto::get_directory_request::ByWhat::Digest(digest) => {
let digest: B3Digest = digest let digest: B3Digest = digest
.clone() .clone()
.try_into() .try_into()