refactor(tvix/castore): B3Digest UX improvements
- Switch from B3_LEN to B3Digest::LENGTH
- make repr(transparent) with underlying [u8; Self::LENGTH]
- impl AsRef<[u8; B3Digest::LENGTH]> for B3Digest
- impl Deref<Target = [u8; Self::LENGTH]> for B3Digest

Change-Id: If4ff077bbd79de1fce4201259bda46c839a7765a
Reviewed-on: https://cl.tvl.fyi/c/depot/+/13258
Tested-by: BuildkiteCI
Reviewed-by: Ilan Joselevich <personal@ilanjoselevich.com>
Autosubmit: flokli <flokli@flokli.de>
Reviewed-by: edef <edef@edef.eu>
parent 84f69cb73a
commit e4999b3a7d

4 changed files with 31 additions and 23 deletions
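The change is purely about call-site ergonomics: the hand-rolled as_slice() accessor goes away in favour of standard-library traits. A minimal before/after sketch of what consumers can now write, assuming the tvix-castore crate as of this change (the digest value is illustrative):

    use tvix_castore::B3Digest;

    fn demo() {
        // Hypothetical digest, built via the From<&[u8; B3Digest::LENGTH]> impl.
        let digest = B3Digest::from(&[0u8; B3Digest::LENGTH]);

        // Before: digest.as_slice() handed out a &[u8].
        // After: Deref<Target = [u8; B3Digest::LENGTH]> lets the digest be sliced directly.
        let _all: &[u8] = &digest[..];
        let _prefix: &[u8] = &digest[..2];

        // AsRef yields the fixed-size array; since it is Copy, * gives an owned value.
        let _owned: [u8; B3Digest::LENGTH] = *digest.as_ref();
    }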
@@ -78,8 +78,8 @@ fn derive_blob_path(base_path: &Path, digest: &B3Digest) -> Path {
     base_path
         .child("blobs")
         .child("b3")
-        .child(HEXLOWER.encode(&digest.as_slice()[..2]))
-        .child(HEXLOWER.encode(digest.as_slice()))
+        .child(HEXLOWER.encode(&digest[..2]))
+        .child(HEXLOWER.encode(&digest[..]))
 }
 
 #[instrument(level=Level::TRACE, skip_all,fields(base_path=%base_path,chunk.digest=%digest),ret(Display))]
@@ -87,8 +87,8 @@ fn derive_chunk_path(base_path: &Path, digest: &B3Digest) -> Path {
     base_path
         .child("chunks")
         .child("b3")
-        .child(HEXLOWER.encode(&digest.as_slice()[..2]))
-        .child(HEXLOWER.encode(digest.as_slice()))
+        .child(HEXLOWER.encode(&digest[..2]))
+        .child(HEXLOWER.encode(&digest[..]))
 }
 
 #[async_trait]
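Both helpers shard blobs/chunks into a two-character hex prefix directory followed by the full lowercase hex digest, and the Deref impl is what lets the digest be sliced straight into HEXLOWER.encode. A standalone sketch of that layout, using a plain [u8; 32] (blake3's output length) in place of B3Digest:

    use data_encoding::HEXLOWER;

    // Returns the two path components used under .../b3/: the 2-byte shard and the full digest.
    fn sharded_components(digest: &[u8; 32]) -> (String, String) {
        let shard = HEXLOWER.encode(&digest[..2]);
        let full = HEXLOWER.encode(&digest[..]);
        (shard, full)
    }

    fn main() {
        let digest = [0xab; 32];
        let (shard, full) = sharded_components(&digest);
        assert_eq!(shard, "abab");
        assert_eq!(full.len(), 64);
        println!("blobs/b3/{shard}/{full}");
    }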
@@ -2,10 +2,13 @@ use bytes::Bytes;
 use data_encoding::BASE64;
 use thiserror::Error;
 
-pub const B3_LEN: usize = blake3::OUT_LEN;
-
 #[derive(PartialEq, Eq, Hash)]
-pub struct B3Digest([u8; B3_LEN]);
+#[repr(transparent)]
+pub struct B3Digest([u8; Self::LENGTH]);
+
+impl B3Digest {
+    pub const LENGTH: usize = blake3::OUT_LEN;
+}
 
 // TODO: allow converting these errors to crate::Error
 #[derive(Error, Debug, PartialEq)]
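Two details worth noting here: Self::LENGTH is an associated constant declared in a separate impl block yet usable in the struct's own field type, and repr(transparent) pins the newtype to the layout of the inner array. A self-contained sketch of the same pattern with a stand-in Digest type (not the crate's code):

    #[derive(PartialEq, Eq, Hash, Clone)]
    #[repr(transparent)]
    pub struct Digest([u8; Self::LENGTH]); // Self::LENGTH resolves to the const below.

    impl Digest {
        pub const LENGTH: usize = 32; // stand-in for blake3::OUT_LEN

        pub fn new(bytes: [u8; Self::LENGTH]) -> Self {
            Self(bytes)
        }
    }

    fn main() {
        let d = Digest::new([0u8; Digest::LENGTH]);
        // repr(transparent) guarantees Digest and [u8; 32] share size and alignment.
        assert_eq!(std::mem::size_of::<Digest>(), Digest::LENGTH);
        assert!(d == Digest::new([0; 32]));
    }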
@@ -14,9 +17,17 @@ pub enum Error {
     InvalidDigestLen(usize),
 }
 
-impl B3Digest {
-    pub fn as_slice(&self) -> &[u8] {
-        &self.0[..]
+impl AsRef<[u8; B3Digest::LENGTH]> for B3Digest {
+    fn as_ref(&self) -> &[u8; Self::LENGTH] {
+        &self.0
+    }
+}
+
+impl std::ops::Deref for B3Digest {
+    type Target = [u8; Self::LENGTH];
+
+    fn deref(&self) -> &Self::Target {
+        &self.0
     }
 }
 
@@ -67,13 +78,13 @@ impl TryFrom<Vec<u8>> for B3Digest {
     }
 }
 
-impl From<&[u8; B3_LEN]> for B3Digest {
-    fn from(value: &[u8; B3_LEN]) -> Self {
+impl From<&[u8; B3Digest::LENGTH]> for B3Digest {
+    fn from(value: &[u8; B3Digest::LENGTH]) -> Self {
         Self(*value)
     }
 }
 
-impl From<B3Digest> for [u8; B3_LEN] {
+impl From<B3Digest> for [u8; B3Digest::LENGTH] {
     fn from(value: B3Digest) -> Self {
         value.0
     }
@@ -11,10 +11,10 @@ use super::{
 };
 use crate::{
     composition::{CompositionContext, ServiceBuilder},
-    digests, proto, B3Digest, Error,
+    proto, B3Digest, Error,
 };
 
-const DIRECTORY_TABLE: TableDefinition<[u8; digests::B3_LEN], Vec<u8>> =
+const DIRECTORY_TABLE: TableDefinition<[u8; B3Digest::LENGTH], Vec<u8>> =
     TableDefinition::new("directory");
 
 #[derive(Clone)]
@@ -81,11 +81,11 @@ impl DirectoryService for RedbDirectoryService {
 
         // Retrieves the protobuf-encoded Directory for the corresponding digest.
         let db_get_resp = tokio::task::spawn_blocking({
-            let digest_as_array: [u8; digests::B3_LEN] = digest.to_owned().into();
+            let digest = *digest.as_ref();
             move || -> Result<_, redb::Error> {
                 let txn = db.begin_read()?;
                 let table = txn.open_table(DIRECTORY_TABLE)?;
-                Ok(table.get(digest_as_array)?)
+                Ok(table.get(digest)?)
             }
         })
         .await?
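The interesting line is the one swapped in above the closure: *digest.as_ref() copies the fixed-size array out of the borrowed B3Digest, so the blocking closure owns a plain Copy value instead of borrowing digest, and no conversion through an owned B3Digest is needed. The same move-a-copied-key pattern, sketched with std::thread::spawn standing in for tokio::task::spawn_blocking and a dummy lookup:

    fn lookup(digest: &[u8; 32]) -> std::thread::JoinHandle<usize> {
        // [u8; 32] is Copy: dereferencing the borrow hands the closure its own key,
        // so the closure is 'static and the caller keeps the original reference.
        let key = *digest;
        std::thread::spawn(move || {
            // stand-in for txn.open_table(DIRECTORY_TABLE)?.get(key)
            key.iter().filter(|b| **b != 0).count()
        })
    }

    fn main() {
        let digest = [1u8; 32];
        let handle = lookup(&digest);
        assert_eq!(handle.join().unwrap(), 32);
    }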
@@ -139,9 +139,8 @@ impl DirectoryService for RedbDirectoryService {
             let txn = db.begin_write()?;
             {
                 let mut table = txn.open_table(DIRECTORY_TABLE)?;
-                let digest_as_array: [u8; digests::B3_LEN] = digest.clone().into();
                 table.insert(
-                    digest_as_array,
+                    digest.as_ref(),
                     proto::Directory::from(directory).encode_to_vec(),
                 )?;
             }
@@ -225,10 +224,8 @@ impl DirectoryPutter for RedbDirectoryPutter<'_> {
                     // Looping over all the verified directories, queuing them up for a
                     // batch insertion.
                     for directory in directories {
-                        let digest_as_array: [u8; digests::B3_LEN] =
-                            directory.digest().into();
                         table.insert(
-                            digest_as_array,
+                            directory.digest().as_ref(),
                             proto::Directory::from(directory).encode_to_vec(),
                         )?;
                     }
@@ -25,7 +25,7 @@ pub mod tonic;
 // Used as user agent in various HTTP Clients
 const USER_AGENT: &str = concat!(env!("CARGO_PKG_NAME"), "/", env!("CARGO_PKG_VERSION"));
 
-pub use digests::{B3Digest, B3_LEN};
+pub use digests::B3Digest;
 pub use errors::{DirectoryError, Error, ValidateNodeError};
 pub use hashing_reader::{B3HashingReader, HashingReader};
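With B3_LEN gone from the public re-exports, downstream code that sized buffers off the free constant switches to the associated constant on the type. A hedged migration sketch, assuming the crate is depended on as tvix-castore:

    use tvix_castore::B3Digest;

    // Previously: use tvix_castore::B3_LEN;  const DIGEST_SIZE: usize = B3_LEN;
    const DIGEST_SIZE: usize = B3Digest::LENGTH;

    fn main() {
        assert_eq!(DIGEST_SIZE, 32); // blake3::OUT_LEN
    }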