refactor(tvix/castore): use Directory struct separate from proto one
This uses our own data type to deal with Directories in the castore model. It makes some undesired states unrepresentable, removing the need for conversions and checking in various places:

- In the protobuf, blake3 digests could have a wrong length, as proto doesn't know fixed-size fields. We now use `B3Digest`, which makes cloning cheaper, and removes the need to do size-checking everywhere.
- In the protobuf, we had three different lists for `files`, `symlinks` and `directories`. This was mostly a protobuf size optimization, but made interacting with them a bit awkward. This has now been replaced with a list of enums, and convenience iterators to get various nodes, and add new ones.

Change-Id: I7b92691bb06d77ff3f58a5ccea94a22c16f84f04
Reviewed-on: https://cl.tvl.fyi/c/depot/+/12057
Tested-by: BuildkiteCI
Reviewed-by: flokli <flokli@flokli.de>
Parent: 5d3f3158d6
Commit: 3ca0b53840
53 changed files with 1429 additions and 1377 deletions
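For orientation, a minimal sketch of what node handling looks like after this change, based only on the constructors and accessor methods visible in the hunks below (FileNode::new, SymlinkNode::new, get_name(), digest(), target()); the wrapper function and the example names/values are made up for illustration and are not part of the change itself:

use tvix_castore::directoryservice::{FileNode, NamedNode, Node, SymlinkNode};
use tvix_castore::B3Digest;

// Nodes are now built through fallible constructors instead of raw proto
// structs, so an invalid name or digest is rejected at construction time.
fn describe(digest: B3Digest) {
    let file = Node::File(
        FileNode::new("hello.txt".into(), digest, 11, false).expect("valid file node"),
    );
    let link = Node::Symlink(
        SymlinkNode::new("self".into(), "hello.txt".into()).expect("valid symlink node"),
    );

    for node in [&file, &link] {
        match node {
            // accessor methods (via the NamedNode trait) replace direct field access
            Node::File(n) => println!("file {:?}, blake3 {}", n.get_name(), n.digest()),
            Node::Symlink(n) => println!("symlink {:?} -> {:?}", n.get_name(), n.target()),
            Node::Directory(n) => println!("dir {:?}, blake3 {}", n.get_name(), n.digest()),
        }
    }
}

The .expect()/.unwrap() style mirrors what the updated tests in this change do; real callers would propagate the construction error instead.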
@@ -352,7 +352,7 @@ async fn run_cli(cli: Cli) -> Result<(), Box<dyn std::error::Error + Send + Sync
 // annotated with information we have from the reference graph.
 let path_info = PathInfo {
 node: Some(tvix_castore::proto::Node {
-node: Some(root_node),
+node: Some((&root_node).into()),
 }),
 references: Vec::from_iter(
 elem.references.iter().map(|e| e.digest().to_vec().into()),
@@ -1,8 +1,9 @@
 use std::path::Path;
 use tracing::{debug, instrument};
 use tvix_castore::{
-blobservice::BlobService, directoryservice::DirectoryService, import::fs::ingest_path,
-proto::node::Node, B3Digest,
+blobservice::BlobService,
+directoryservice::{DirectoryService, NamedNode, Node},
+import::fs::ingest_path,
 };

 use nix_compat::{
@@ -32,24 +33,24 @@ pub fn log_node(node: &Node, path: &Path) {
 Node::Directory(directory_node) => {
 debug!(
 path = ?path,
-name = ?directory_node.name,
-digest = %B3Digest::try_from(directory_node.digest.clone()).unwrap(),
+name = ?directory_node.get_name(),
+digest = %directory_node.digest(),
 "import successful",
 )
 }
 Node::File(file_node) => {
 debug!(
 path = ?path,
-name = ?file_node.name,
-digest = %B3Digest::try_from(file_node.digest.clone()).unwrap(),
+name = ?file_node.get_name(),
+digest = %file_node.digest(),
 "import successful"
 )
 }
 Node::Symlink(symlink_node) => {
 debug!(
 path = ?path,
-name = ?symlink_node.name,
-target = ?symlink_node.target,
+name = ?symlink_node.get_name(),
+target = ?symlink_node.target(),
 "import successful"
 )
 }
@@ -87,7 +88,7 @@ pub fn derive_nar_ca_path_info(
 // assemble the [crate::proto::PathInfo] object.
 PathInfo {
 node: Some(tvix_castore::proto::Node {
-node: Some(root_node),
+node: Some((&root_node).into()),
 }),
 // There's no reference scanning on path contents ingested like this.
 references: vec![],
@@ -7,12 +7,11 @@ use tokio::{
 };
 use tvix_castore::{
 blobservice::BlobService,
-directoryservice::DirectoryService,
+directoryservice::{DirectoryService, NamedNode, Node},
 import::{
 blobs::{self, ConcurrentBlobUploader},
 ingest_entries, IngestionEntry, IngestionError,
 },
-proto::{node::Node, NamedNode},
 PathBuf,
 };

@@ -99,7 +98,7 @@ where
 let (_, node) = try_join!(produce, consume)?;

 // remove the fake "root" name again
-debug_assert_eq!(&node.get_name(), b"root");
+debug_assert_eq!(&node.get_name()[..], b"root");
 Ok(node.rename("".into()))
 }

@@ -172,12 +171,13 @@ mod test {
 use rstest::*;
 use tokio_stream::StreamExt;
 use tvix_castore::blobservice::BlobService;
-use tvix_castore::directoryservice::DirectoryService;
+use tvix_castore::directoryservice::{
+Directory, DirectoryNode, DirectoryService, FileNode, Node, SymlinkNode,
+};
 use tvix_castore::fixtures::{
 DIRECTORY_COMPLICATED, DIRECTORY_WITH_KEEP, EMPTY_BLOB_DIGEST, HELLOWORLD_BLOB_CONTENTS,
 HELLOWORLD_BLOB_DIGEST,
 };
-use tvix_castore::proto as castorepb;

 use crate::tests::fixtures::{
 blob_service, directory_service, NAR_CONTENTS_COMPLICATED, NAR_CONTENTS_HELLOWORLD,
@@ -199,10 +199,13 @@ mod test {
 .expect("must parse");

 assert_eq!(
-castorepb::node::Node::Symlink(castorepb::SymlinkNode {
-name: "".into(), // name must be empty
-target: "/nix/store/somewhereelse".into(),
-}),
+Node::Symlink(
+SymlinkNode::new(
+"".into(), // name must be empty
+"/nix/store/somewhereelse".into(),
+)
+.unwrap()
+),
 root_node
 );
 }
@@ -222,12 +225,15 @@ mod test {
 .expect("must parse");

 assert_eq!(
-castorepb::node::Node::File(castorepb::FileNode {
-name: "".into(), // name must be empty
-digest: HELLOWORLD_BLOB_DIGEST.clone().into(),
-size: HELLOWORLD_BLOB_CONTENTS.len() as u64,
-executable: false,
-}),
+Node::File(
+FileNode::new(
+"".into(), // name must be empty
+HELLOWORLD_BLOB_DIGEST.clone(),
+HELLOWORLD_BLOB_CONTENTS.len() as u64,
+false,
+)
+.unwrap()
+),
 root_node
 );

@@ -250,11 +256,14 @@ mod test {
 .expect("must parse");

 assert_eq!(
-castorepb::node::Node::Directory(castorepb::DirectoryNode {
-name: "".into(), // name must be empty
-digest: DIRECTORY_COMPLICATED.digest().into(),
-size: DIRECTORY_COMPLICATED.size(),
-}),
+Node::Directory(
+DirectoryNode::new(
+"".into(), // name must be empty
+DIRECTORY_COMPLICATED.digest(),
+DIRECTORY_COMPLICATED.size(),
+)
+.unwrap()
+),
 root_node,
 );

@@ -262,7 +271,7 @@ mod test {
 assert!(blob_service.has(&EMPTY_BLOB_DIGEST).await.unwrap());

 // directoryservice must contain the directories, at least with get_recursive.
-let resp: Result<Vec<castorepb::Directory>, _> = directory_service
+let resp: Result<Vec<Directory>, _> = directory_service
 .get_recursive(&DIRECTORY_COMPLICATED.digest())
 .collect()
 .await;
@@ -8,16 +8,14 @@ pub use import::ingest_nar_and_hash;
 pub use renderer::calculate_size_and_sha256;
 pub use renderer::write_nar;
 pub use renderer::SimpleRenderer;
-use tvix_castore::proto as castorepb;
+use tvix_castore::directoryservice::Node;

 #[async_trait]
 pub trait NarCalculationService: Send + Sync {
 /// Return the nar size and nar sha256 digest for a given root node.
 /// This can be used to calculate NAR-based output paths.
-async fn calculate_nar(
-&self,
-root_node: &castorepb::node::Node,
-) -> Result<(u64, [u8; 32]), tvix_castore::Error>;
+async fn calculate_nar(&self, root_node: &Node)
+-> Result<(u64, [u8; 32]), tvix_castore::Error>;
 }

 #[async_trait]
@@ -27,7 +25,7 @@ where
 {
 async fn calculate_nar(
 &self,
-root_node: &castorepb::node::Node,
+root_node: &Node,
 ) -> Result<(u64, [u8; 32]), tvix_castore::Error> {
 self.as_ref().calculate_nar(root_node).await
 }
@@ -10,8 +10,7 @@ use tracing::{instrument, Span};
 use tracing_indicatif::span_ext::IndicatifSpanExt;
 use tvix_castore::{
 blobservice::BlobService,
-directoryservice::DirectoryService,
-proto::{self as castorepb, NamedNode},
+directoryservice::{DirectoryService, NamedNode, Node},
 };

 pub struct SimpleRenderer<BS, DS> {
@@ -36,7 +35,7 @@ where
 {
 async fn calculate_nar(
 &self,
-root_node: &castorepb::node::Node,
+root_node: &Node,
 ) -> Result<(u64, [u8; 32]), tvix_castore::Error> {
 calculate_size_and_sha256(
 root_node,
@@ -52,7 +51,7 @@ where
 /// NAR output.
 #[instrument(skip_all, fields(indicatif.pb_show=1))]
 pub async fn calculate_size_and_sha256<BS, DS>(
-root_node: &castorepb::node::Node,
+root_node: &Node,
 blob_service: BS,
 directory_service: DS,
 ) -> Result<(u64, [u8; 32]), RenderError>
@@ -80,13 +79,13 @@ where
 Ok((cw.count(), h.finalize().into()))
 }

-/// Accepts a [castorepb::node::Node] pointing to the root of a (store) path,
+/// Accepts a [Node] pointing to the root of a (store) path,
 /// and uses the passed blob_service and directory_service to perform the
 /// necessary lookups as it traverses the structure.
 /// The contents in NAR serialization are writen to the passed [AsyncWrite].
 pub async fn write_nar<W, BS, DS>(
 mut w: W,
-proto_root_node: &castorepb::node::Node,
+proto_root_node: &Node,
 blob_service: BS,
 directory_service: DS,
 ) -> Result<(), RenderError>
@@ -115,7 +114,7 @@ where
 /// This consumes the node.
 async fn walk_node<BS, DS>(
 nar_node: nar_writer::Node<'_, '_>,
-proto_node: &castorepb::node::Node,
+proto_node: &Node,
 blob_service: BS,
 directory_service: DS,
 ) -> Result<(BS, DS), RenderError>
@@ -124,23 +123,17 @@ where
 DS: DirectoryService + Send,
 {
 match proto_node {
-castorepb::node::Node::Symlink(proto_symlink_node) => {
+Node::Symlink(proto_symlink_node) => {
 nar_node
-.symlink(&proto_symlink_node.target)
+.symlink(proto_symlink_node.target())
 .await
 .map_err(RenderError::NARWriterError)?;
 }
-castorepb::node::Node::File(proto_file_node) => {
-let digest_len = proto_file_node.digest.len();
-let digest = proto_file_node.digest.clone().try_into().map_err(|_| {
-RenderError::StoreError(io::Error::new(
-io::ErrorKind::Other,
-format!("invalid digest len {} in file node", digest_len),
-))
-})?;
+Node::File(proto_file_node) => {
+let digest = proto_file_node.digest();

 let mut blob_reader = match blob_service
-.open_read(&digest)
+.open_read(digest)
 .await
 .map_err(RenderError::StoreError)?
 {
@@ -153,36 +146,24 @@ where

 nar_node
 .file(
-proto_file_node.executable,
-proto_file_node.size,
+proto_file_node.executable(),
+proto_file_node.size(),
 &mut blob_reader,
 )
 .await
 .map_err(RenderError::NARWriterError)?;
 }
-castorepb::node::Node::Directory(proto_directory_node) => {
-let digest_len = proto_directory_node.digest.len();
-let digest = proto_directory_node
-.digest
-.clone()
-.try_into()
-.map_err(|_| {
-RenderError::StoreError(io::Error::new(
-io::ErrorKind::InvalidData,
-format!("invalid digest len {} in directory node", digest_len),
-))
-})?;
-
+Node::Directory(proto_directory_node) => {
 // look it up with the directory service
 match directory_service
-.get(&digest)
+.get(proto_directory_node.digest())
 .await
 .map_err(|e| RenderError::StoreError(e.into()))?
 {
 // if it's None, that's an error!
 None => Err(RenderError::DirectoryNotFound(
-digest,
-proto_directory_node.name.clone(),
+proto_directory_node.digest().clone(),
+proto_directory_node.get_name().clone(),
 ))?,
 Some(proto_directory) => {
 // start a directory node
@@ -206,7 +187,7 @@ where

 (blob_service, directory_service) = Box::pin(walk_node(
 child_node,
-&proto_node,
+proto_node,
 blob_service,
 directory_service,
 ))
@@ -1,10 +1,10 @@
 use futures::stream::BoxStream;
 use futures::StreamExt;
 use tonic::async_trait;
+use tvix_castore::directoryservice::Node;
 use tvix_castore::fs::{RootNodes, TvixStoreFs};
-use tvix_castore::proto as castorepb;
-use tvix_castore::Error;
 use tvix_castore::{blobservice::BlobService, directoryservice::DirectoryService};
+use tvix_castore::{Error, ValidateNodeError};

 use super::PathInfoService;

@@ -48,7 +48,7 @@ impl<T> RootNodes for RootNodesWrapper<T>
 where
 T: AsRef<dyn PathInfoService> + Send + Sync,
 {
-async fn get_by_basename(&self, name: &[u8]) -> Result<Option<castorepb::node::Node>, Error> {
+async fn get_by_basename(&self, name: &[u8]) -> Result<Option<Node>, Error> {
 let Ok(store_path) = nix_compat::store_path::StorePath::from_bytes(name) else {
 return Ok(None);
 };
@@ -61,20 +61,23 @@ where
 .map(|path_info| {
 path_info
 .node
+.as_ref()
 .expect("missing root node")
-.node
-.expect("empty node")
-}))
+.try_into()
+.map_err(|e: ValidateNodeError| Error::StorageError(e.to_string()))
+})
+.transpose()?)
 }

-fn list(&self) -> BoxStream<Result<castorepb::node::Node, Error>> {
+fn list(&self) -> BoxStream<Result<Node, Error>> {
 Box::pin(self.0.as_ref().list().map(|result| {
-result.map(|path_info| {
+result.and_then(|path_info| {
 path_info
 .node
+.as_ref()
 .expect("missing root node")
-.node
-.expect("empty node")
+.try_into()
+.map_err(|e: ValidateNodeError| Error::StorageError(e.to_string()))
 })
 }))
 }
@@ -11,7 +11,8 @@ use tonic::{async_trait, Code};
 use tracing::{instrument, Span};
 use tracing_indicatif::span_ext::IndicatifSpanExt;
 use tvix_castore::composition::{CompositionContext, ServiceBuilder};
-use tvix_castore::{proto as castorepb, Error};
+use tvix_castore::directoryservice::Node;
+use tvix_castore::Error;

 /// Connects to a (remote) tvix-store PathInfoService over gRPC.
 #[derive(Clone)]
@@ -123,10 +124,7 @@ where
 T::Future: Send,
 {
 #[instrument(level = "trace", skip_all, fields(root_node = ?root_node, indicatif.pb_show=1))]
-async fn calculate_nar(
-&self,
-root_node: &castorepb::node::Node,
-) -> Result<(u64, [u8; 32]), Error> {
+async fn calculate_nar(&self, root_node: &Node) -> Result<(u64, [u8; 32]), Error> {
 let span = Span::current();
 span.pb_set_message("Waiting for NAR calculation");
 span.pb_start();
@@ -134,8 +132,8 @@ where
 let path_info = self
 .grpc_client
 .clone()
-.calculate_nar(castorepb::Node {
-node: Some(root_node.clone()),
+.calculate_nar(tvix_castore::proto::Node {
+node: Some(root_node.into()),
 })
 .await
 .map_err(|e| Error::StorageError(e.to_string()))?
@@ -109,7 +109,10 @@ mod test {
 let root_node = p.node.as_mut().unwrap();
 if let castorepb::Node { node: Some(node) } = root_node {
 let n = node.to_owned();
-*node = n.rename("11111111111111111111111111111111-dummy2".into());
+*node = (&tvix_castore::directoryservice::Node::try_from(&n)
+.unwrap()
+.rename("11111111111111111111111111111111-dummy2".into()))
+.into();
 } else {
 unreachable!()
 }
@@ -230,7 +230,7 @@ where
 Ok(Some(PathInfo {
 node: Some(castorepb::Node {
 // set the name of the root node to the digest-name of the store path.
-node: Some(root_node.rename(narinfo.store_path.to_string().to_owned().into())),
+node: Some((&root_node.rename(narinfo.store_path.to_string().into())).into()),
 }),
 references: pathinfo.references,
 narinfo: pathinfo.narinfo,
@@ -74,24 +74,19 @@ where
 &self,
 request: Request<castorepb::Node>,
 ) -> Result<Response<proto::CalculateNarResponse>> {
-match request.into_inner().node {
-None => Err(Status::invalid_argument("no root node sent")),
-Some(root_node) => {
-if let Err(e) = root_node.validate() {
-warn!(err = %e, "invalid root node");
-Err(Status::invalid_argument("invalid root node"))?
-}
+let root_node = (&request.into_inner()).try_into().map_err(|e| {
+warn!(err = %e, "invalid root node");
+Status::invalid_argument("invalid root node")
+})?;

-match self.nar_calculation_service.calculate_nar(&root_node).await {
-Ok((nar_size, nar_sha256)) => Ok(Response::new(proto::CalculateNarResponse {
-nar_size,
-nar_sha256: nar_sha256.to_vec().into(),
-})),
-Err(e) => {
-warn!(err = %e, "error during NAR calculation");
-Err(e.into())
-}
-}
+match self.nar_calculation_service.calculate_nar(&root_node).await {
+Ok((nar_size, nar_sha256)) => Ok(Response::new(proto::CalculateNarResponse {
+nar_size,
+nar_sha256: nar_sha256.to_vec().into(),
+})),
+Err(e) => {
+warn!(err = %e, "error during NAR calculation");
+Err(e.into())
+}
 }
 }
 }
@@ -9,7 +9,8 @@ use nix_compat::{
 store_path::{self, StorePathRef},
 };
 use thiserror::Error;
-use tvix_castore::proto::{self as castorepb, NamedNode, ValidateNodeError};
+use tvix_castore::directoryservice::NamedNode;
+use tvix_castore::ValidateNodeError;

 mod grpc_pathinfoservice_wrapper;

@@ -87,7 +88,7 @@ impl PathInfo {
 /// validate performs some checks on the PathInfo struct,
 /// Returning either a [store_path::StorePath] of the root node, or a
 /// [ValidatePathInfoError].
-pub fn validate(&self) -> Result<store_path::StorePathRef<'_>, ValidatePathInfoError> {
+pub fn validate(&self) -> Result<store_path::StorePath, ValidatePathInfoError> {
 // ensure the references have the right number of bytes.
 for (i, reference) in self.references.iter().enumerate() {
 if reference.len() != store_path::DIGEST_SIZE {
@@ -158,14 +159,15 @@ impl PathInfo {

 // Ensure there is a (root) node present, and it properly parses to a [store_path::StorePath].
 let root_nix_path = match &self.node {
-None | Some(castorepb::Node { node: None }) => {
-Err(ValidatePathInfoError::NoNodePresent)?
-}
-Some(castorepb::Node { node: Some(node) }) => {
-node.validate()
+None => Err(ValidatePathInfoError::NoNodePresent)?,
+Some(node) => {
+// TODO save result somewhere
+let node: tvix_castore::directoryservice::Node = node
+.try_into()
+.map_err(ValidatePathInfoError::InvalidRootNode)?;
 // parse the name of the node itself and return
 parse_node_name_root(node.get_name(), ValidatePathInfoError::InvalidNodeName)?
 .to_owned()
 }
 };

@@ -3,17 +3,18 @@ use crate::tests::fixtures::*;
 use bytes::Bytes;
 use data_encoding::BASE64;
 use nix_compat::nixbase32;
-use nix_compat::store_path::{self, StorePathRef};
+use nix_compat::store_path::{self, StorePath, StorePathRef};
 use rstest::rstest;
 use tvix_castore::proto as castorepb;
+use tvix_castore::ValidateNodeError;

 #[rstest]
 #[case::no_node(None, Err(ValidatePathInfoError::NoNodePresent))]
-#[case::no_node_2(Some(castorepb::Node { node: None}), Err(ValidatePathInfoError::NoNodePresent))]
+#[case::no_node_2(Some(castorepb::Node { node: None}), Err(ValidatePathInfoError::InvalidRootNode(ValidateNodeError::NoNodeSet)))]

 fn validate_pathinfo(
 #[case] node: Option<castorepb::Node>,
-#[case] exp_result: Result<StorePathRef, ValidatePathInfoError>,
+#[case] exp_result: Result<StorePath, ValidatePathInfoError>,
 ) {
 // construct the PathInfo object
 let p = PathInfo {
@@ -22,9 +23,6 @@ fn validate_pathinfo(
 };

 assert_eq!(exp_result, p.validate());
-
-let err = p.validate().expect_err("validation should fail");
-assert!(matches!(err, ValidatePathInfoError::NoNodePresent));
 }

 #[rstest]
@@ -32,12 +30,12 @@ fn validate_pathinfo(
 name: DUMMY_PATH.into(),
 digest: DUMMY_DIGEST.clone().into(),
 size: 0,
-}, Ok(StorePathRef::from_bytes(DUMMY_PATH.as_bytes()).unwrap()))]
+}, Ok(StorePath::from_bytes(DUMMY_PATH.as_bytes()).unwrap()))]
 #[case::invalid_digest_length(castorepb::DirectoryNode {
 name: DUMMY_PATH.into(),
 digest: Bytes::new(),
 size: 0,
-}, Err(ValidatePathInfoError::InvalidRootNode(castorepb::ValidateNodeError::InvalidDigestLen(0))))]
+}, Err(ValidatePathInfoError::InvalidRootNode(tvix_castore::ValidateNodeError::InvalidDigestLen(0))))]
 #[case::invalid_node_name_no_storepath(castorepb::DirectoryNode {
 name: "invalid".into(),
 digest: DUMMY_DIGEST.clone().into(),
@@ -48,7 +46,7 @@ fn validate_pathinfo(
 )))]
 fn validate_directory(
 #[case] directory_node: castorepb::DirectoryNode,
-#[case] exp_result: Result<StorePathRef, ValidatePathInfoError>,
+#[case] exp_result: Result<StorePath, ValidatePathInfoError>,
 ) {
 // construct the PathInfo object
 let p = PathInfo {
@@ -68,7 +66,7 @@ fn validate_directory(
 size: 0,
 executable: false,
 },
-Ok(StorePathRef::from_bytes(DUMMY_PATH.as_bytes()).unwrap())
+Ok(StorePath::from_bytes(DUMMY_PATH.as_bytes()).unwrap())
 )]
 #[case::invalid_digest_len(
 castorepb::FileNode {
@@ -76,7 +74,7 @@ fn validate_directory(
 digest: Bytes::new(),
 ..Default::default()
 },
-Err(ValidatePathInfoError::InvalidRootNode(castorepb::ValidateNodeError::InvalidDigestLen(0)))
+Err(ValidatePathInfoError::InvalidRootNode(tvix_castore::ValidateNodeError::InvalidDigestLen(0)))
 )]
 #[case::invalid_node_name(
 castorepb::FileNode {
@@ -91,7 +89,7 @@ fn validate_directory(
 )]
 fn validate_file(
 #[case] file_node: castorepb::FileNode,
-#[case] exp_result: Result<StorePathRef, ValidatePathInfoError>,
+#[case] exp_result: Result<StorePath, ValidatePathInfoError>,
 ) {
 // construct the PathInfo object
 let p = PathInfo {
@@ -109,7 +107,7 @@ fn validate_file(
 name: DUMMY_PATH.into(),
 target: "foo".into(),
 },
-Ok(StorePathRef::from_bytes(DUMMY_PATH.as_bytes()).unwrap())
+Ok(StorePath::from_bytes(DUMMY_PATH.as_bytes()).unwrap())
 )]
 #[case::invalid_node_name(
 castorepb::SymlinkNode {
@@ -123,7 +121,7 @@ fn validate_file(
 )]
 fn validate_symlink(
 #[case] symlink_node: castorepb::SymlinkNode,
-#[case] exp_result: Result<StorePathRef, ValidatePathInfoError>,
+#[case] exp_result: Result<StorePath, ValidatePathInfoError>,
 ) {
 // construct the PathInfo object
 let p = PathInfo {
@@ -233,7 +231,7 @@ fn validate_symlink_empty_target_invalid() {
 target: "".into(),
 });

-node.validate().expect_err("must fail validation");
+tvix_castore::directoryservice::Node::try_from(&node).expect_err("must fail validation");
 }

 /// Create a node with a symlink target including null bytes, and ensure it
@@ -245,7 +243,7 @@ fn validate_symlink_target_null_byte_invalid() {
 target: "foo\0".into(),
 });

-node.validate().expect_err("must fail validation");
+tvix_castore::directoryservice::Node::try_from(&node).expect_err("must fail validation");
 }

 /// Create a PathInfo with a correct deriver field and ensure it succeeds.
@@ -9,8 +9,9 @@ use std::io;
 use std::sync::Arc;
 use tokio::io::sink;
 use tvix_castore::blobservice::BlobService;
-use tvix_castore::directoryservice::DirectoryService;
-use tvix_castore::proto as castorepb;
+use tvix_castore::directoryservice::{
+DirectoryNode, DirectoryService, FileNode, Node, SymlinkNode,
+};

 #[rstest]
 #[tokio::test]
@@ -22,10 +23,9 @@ async fn single_symlink(

 write_nar(
 &mut buf,
-&castorepb::node::Node::Symlink(castorepb::SymlinkNode {
-name: "doesntmatter".into(),
-target: "/nix/store/somewhereelse".into(),
-}),
+&Node::Symlink(
+SymlinkNode::new("doesntmatter".into(), "/nix/store/somewhereelse".into()).unwrap(),
+),
 // don't put anything in the stores, as we don't actually do any requests.
 blob_service,
 directory_service,
@@ -45,12 +45,15 @@ async fn single_file_missing_blob(
 ) {
 let e = write_nar(
 sink(),
-&castorepb::node::Node::File(castorepb::FileNode {
-name: "doesntmatter".into(),
-digest: HELLOWORLD_BLOB_DIGEST.clone().into(),
-size: HELLOWORLD_BLOB_CONTENTS.len() as u64,
-executable: false,
-}),
+&Node::File(
+FileNode::new(
+"doesntmatter".into(),
+HELLOWORLD_BLOB_DIGEST.clone(),
+HELLOWORLD_BLOB_CONTENTS.len() as u64,
+false,
+)
+.unwrap(),
+),
 // the blobservice is empty intentionally, to provoke the error.
 blob_service,
 directory_service,
@@ -90,12 +93,15 @@ async fn single_file_wrong_blob_size(
 // Test with a root FileNode of a too big size
 let e = write_nar(
 sink(),
-&castorepb::node::Node::File(castorepb::FileNode {
-name: "doesntmatter".into(),
-digest: HELLOWORLD_BLOB_DIGEST.clone().into(),
-size: 42, // <- note the wrong size here!
-executable: false,
-}),
+&Node::File(
+FileNode::new(
+"doesntmatter".into(),
+HELLOWORLD_BLOB_DIGEST.clone(),
+42, // <- note the wrong size here!
+false,
+)
+.unwrap(),
+),
 blob_service.clone(),
 directory_service.clone(),
 )
@@ -112,12 +118,15 @@ async fn single_file_wrong_blob_size(
 // Test with a root FileNode of a too small size
 let e = write_nar(
 sink(),
-&castorepb::node::Node::File(castorepb::FileNode {
-name: "doesntmatter".into(),
-digest: HELLOWORLD_BLOB_DIGEST.clone().into(),
-size: 2, // <- note the wrong size here!
-executable: false,
-}),
+&Node::File(
+FileNode::new(
+"doesntmatter".into(),
+HELLOWORLD_BLOB_DIGEST.clone(),
+2, // <- note the wrong size here!
+false,
+)
+.unwrap(),
+),
 blob_service,
 directory_service,
 )
@@ -153,12 +162,15 @@ async fn single_file(

 write_nar(
 &mut buf,
-&castorepb::node::Node::File(castorepb::FileNode {
-name: "doesntmatter".into(),
-digest: HELLOWORLD_BLOB_DIGEST.clone().into(),
-size: HELLOWORLD_BLOB_CONTENTS.len() as u64,
-executable: false,
-}),
+&Node::File(
+FileNode::new(
+"doesntmatter".into(),
+HELLOWORLD_BLOB_DIGEST.clone(),
+HELLOWORLD_BLOB_CONTENTS.len() as u64,
+false,
+)
+.unwrap(),
+),
 blob_service,
 directory_service,
 )
@@ -196,11 +208,14 @@ async fn test_complicated(

 write_nar(
 &mut buf,
-&castorepb::node::Node::Directory(castorepb::DirectoryNode {
-name: "doesntmatter".into(),
-digest: DIRECTORY_COMPLICATED.digest().into(),
-size: DIRECTORY_COMPLICATED.size(),
-}),
+&Node::Directory(
+DirectoryNode::new(
+"doesntmatter".into(),
+DIRECTORY_COMPLICATED.digest(),
+DIRECTORY_COMPLICATED.size(),
+)
+.unwrap(),
+),
 blob_service.clone(),
 directory_service.clone(),
 )
@@ -211,11 +226,14 @@ async fn test_complicated(

 // ensure calculate_nar does return the correct sha256 digest and sum.
 let (nar_size, nar_digest) = calculate_size_and_sha256(
-&castorepb::node::Node::Directory(castorepb::DirectoryNode {
-name: "doesntmatter".into(),
-digest: DIRECTORY_COMPLICATED.digest().into(),
-size: DIRECTORY_COMPLICATED.size(),
-}),
+&Node::Directory(
+DirectoryNode::new(
+"doesntmatter".into(),
+DIRECTORY_COMPLICATED.digest(),
+DIRECTORY_COMPLICATED.size(),
+)
+.unwrap(),
+),
 blob_service,
 directory_service,
 )