refactor(tvix/castore): drop {Directory,File,Symlink}Node

Add a `SymlinkTarget` type to represent validated symlink targets.
With this, no invalid states are representable, so `Node` can become a
plain enum of the three node kinds, with their fields accessible
directly.
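
For orientation, a minimal sketch of the shape this gives the API. The
variant and field names are taken from the hunks below; `B3Digest` is a
placeholder for the castore digest type, and the `TryFrom`/`AsRef` impls
and the non-empty check are simplified assumptions about the validation,
not the real implementation:

    /// Placeholder for the castore blake3 digest type.
    #[derive(Clone, Debug, PartialEq, Eq)]
    pub struct B3Digest([u8; 32]);

    /// A symlink target validated at construction time (illustrative check only).
    #[derive(Clone, Debug, PartialEq, Eq)]
    pub struct SymlinkTarget(Vec<u8>);

    impl TryFrom<&str> for SymlinkTarget {
        type Error = &'static str;

        fn try_from(s: &str) -> Result<Self, Self::Error> {
            // Reject empty targets; the real type may enforce more rules.
            if s.is_empty() {
                return Err("symlink target must not be empty");
            }
            Ok(SymlinkTarget(s.as_bytes().to_vec()))
        }
    }

    impl AsRef<[u8]> for SymlinkTarget {
        fn as_ref(&self) -> &[u8] {
            &self.0
        }
    }

    /// With targets validated up front, `Node` can expose its fields directly
    /// instead of wrapping separate DirectoryNode/FileNode/SymlinkNode structs.
    #[derive(Clone, Debug, PartialEq, Eq)]
    pub enum Node {
        Directory { digest: B3Digest, size: u64 },
        File { digest: B3Digest, size: u64, executable: bool },
        Symlink { target: SymlinkTarget },
    }

    fn main() {
        // Construction goes through the validating conversion...
        let node = Node::Symlink {
            target: "/nix/store/somewhereelse".try_into().expect("valid target"),
        };
        // ...and callers destructure the fields directly.
        if let Node::Symlink { target } = &node {
            println!("symlink target is {} bytes", target.as_ref().len());
        }
    }

The hunks below then replace accessor calls like `directory_node.digest()`
with bindings taken straight from the match patterns.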

Change-Id: I20bdd480c8d5e64a827649f303c97023b7e390f2
Reviewed-on: https://cl.tvl.fyi/c/depot/+/12216
Reviewed-by: benjaminedwardwebb <benjaminedwardwebb@gmail.com>
Autosubmit: flokli <flokli@flokli.de>
Reviewed-by: Connor Brewster <cbrewster@hey.com>
Tested-by: BuildkiteCI
Author:  Florian Klink (committed by clbot)
Date:    2024-08-16 02:24:12 +03:00
Parent:  49b173786c
Commit:  8ea7d2b60e

27 changed files with 555 additions and 461 deletions

View file

@ -29,27 +29,27 @@ impl From<CAHash> for nar_info::Ca {
 pub fn log_node(name: &[u8], node: &Node, path: &Path) {
     match node {
-        Node::Directory(directory_node) => {
+        Node::Directory { digest, .. } => {
             debug!(
                 path = ?path,
                 name = %name.as_bstr(),
-                digest = %directory_node.digest(),
+                digest = %digest,
                 "import successful",
             )
         }
-        Node::File(file_node) => {
+        Node::File { digest, .. } => {
             debug!(
                 path = ?path,
                 name = %name.as_bstr(),
-                digest = %file_node.digest(),
+                digest = %digest,
                 "import successful"
             )
         }
-        Node::Symlink(symlink_node) => {
+        Node::Symlink { target } => {
             debug!(
                 path = ?path,
                 name = %name.as_bstr(),
-                target = ?symlink_node.target(),
+                target = ?target,
                 "import successful"
             )
         }

View file

@ -174,7 +174,7 @@ mod test {
         DIRECTORY_COMPLICATED, DIRECTORY_WITH_KEEP, EMPTY_BLOB_DIGEST, HELLOWORLD_BLOB_CONTENTS,
         HELLOWORLD_BLOB_DIGEST,
     };
-    use tvix_castore::{Directory, DirectoryNode, FileNode, Node, SymlinkNode};
+    use tvix_castore::{Directory, Node};
     use crate::tests::fixtures::{
         blob_service, directory_service, NAR_CONTENTS_COMPLICATED, NAR_CONTENTS_HELLOWORLD,
@ -196,7 +196,9 @@ mod test {
             .expect("must parse");
         assert_eq!(
-            Node::Symlink(SymlinkNode::new("/nix/store/somewhereelse".into(),).unwrap()),
+            Node::Symlink {
+                target: "/nix/store/somewhereelse".try_into().unwrap()
+            },
             root_node
         );
     }
@ -216,11 +218,11 @@ mod test {
             .expect("must parse");
         assert_eq!(
-            Node::File(FileNode::new(
-                HELLOWORLD_BLOB_DIGEST.clone(),
-                HELLOWORLD_BLOB_CONTENTS.len() as u64,
-                false,
-            )),
+            Node::File {
+                digest: HELLOWORLD_BLOB_DIGEST.clone(),
+                size: HELLOWORLD_BLOB_CONTENTS.len() as u64,
+                executable: false,
+            },
             root_node
         );
@ -243,10 +245,10 @@ mod test {
             .expect("must parse");
         assert_eq!(
-            Node::Directory(DirectoryNode::new(
-                DIRECTORY_COMPLICATED.digest(),
-                DIRECTORY_COMPLICATED.size(),
-            )),
+            Node::Directory {
+                digest: DIRECTORY_COMPLICATED.digest(),
+                size: DIRECTORY_COMPLICATED.size()
+            },
             root_node,
         );

View file

@ -82,7 +82,7 @@ where
 /// The contents in NAR serialization are writen to the passed [AsyncWrite].
 pub async fn write_nar<W, BS, DS>(
     mut w: W,
-    proto_root_node: &Node,
+    root_node: &Node,
     blob_service: BS,
     directory_service: DS,
 ) -> Result<(), RenderError>
@ -98,7 +98,7 @@ where
     walk_node(
         nar_root_node,
-        proto_root_node,
+        root_node,
         b"",
         blob_service,
         directory_service,
@ -122,15 +122,17 @@ where
     DS: DirectoryService + Send,
 {
     match castore_node {
-        Node::Symlink(symlink_node) => {
+        Node::Symlink { target, .. } => {
             nar_node
-                .symlink(symlink_node.target())
+                .symlink(target.as_ref())
                 .await
                 .map_err(RenderError::NARWriterError)?;
         }
-        Node::File(proto_file_node) => {
-            let digest = proto_file_node.digest();
+        Node::File {
+            digest,
+            size,
+            executable,
+        } => {
             let mut blob_reader = match blob_service
                 .open_read(digest)
                 .await
@ -144,24 +146,20 @@ where
             }?;
             nar_node
-                .file(
-                    proto_file_node.executable(),
-                    proto_file_node.size(),
-                    &mut blob_reader,
-                )
+                .file(*executable, *size, &mut blob_reader)
                 .await
                 .map_err(RenderError::NARWriterError)?;
         }
-        Node::Directory(directory_node) => {
+        Node::Directory { digest, .. } => {
             // look it up with the directory service
             match directory_service
-                .get(directory_node.digest())
+                .get(digest)
                 .await
                 .map_err(|e| RenderError::StoreError(e.into()))?
             {
                 // if it's None, that's an error!
                 None => Err(RenderError::DirectoryNotFound(
-                    directory_node.digest().clone(),
+                    digest.clone(),
                     bytes::Bytes::copy_from_slice(name),
                 ))?,
                 Some(directory) => {

View file

@ -10,7 +10,7 @@ use std::sync::Arc;
 use tokio::io::sink;
 use tvix_castore::blobservice::BlobService;
 use tvix_castore::directoryservice::DirectoryService;
-use tvix_castore::{DirectoryNode, FileNode, Node, SymlinkNode};
+use tvix_castore::Node;
 #[rstest]
 #[tokio::test]
@ -22,7 +22,9 @@ async fn single_symlink(
     write_nar(
         &mut buf,
-        &Node::Symlink(SymlinkNode::new("/nix/store/somewhereelse".into()).unwrap()),
+        &Node::Symlink {
+            target: "/nix/store/somewhereelse".try_into().unwrap(),
+        },
         // don't put anything in the stores, as we don't actually do any requests.
         blob_service,
         directory_service,
@ -42,11 +44,11 @@ async fn single_file_missing_blob(
 ) {
     let e = write_nar(
         sink(),
-        &Node::File(FileNode::new(
-            HELLOWORLD_BLOB_DIGEST.clone(),
-            HELLOWORLD_BLOB_CONTENTS.len() as u64,
-            false,
-        )),
+        &Node::File {
+            digest: HELLOWORLD_BLOB_DIGEST.clone(),
+            size: HELLOWORLD_BLOB_CONTENTS.len() as u64,
+            executable: false,
+        },
         // the blobservice is empty intentionally, to provoke the error.
         blob_service,
         directory_service,
@ -86,11 +88,11 @@ async fn single_file_wrong_blob_size(
     // Test with a root FileNode of a too big size
     let e = write_nar(
         sink(),
-        &Node::File(FileNode::new(
-            HELLOWORLD_BLOB_DIGEST.clone(),
-            42, // <- note the wrong size here!
-            false,
-        )),
+        &Node::File {
+            digest: HELLOWORLD_BLOB_DIGEST.clone(),
+            size: 42, // <- note the wrong size here!
+            executable: false,
+        },
         blob_service.clone(),
         directory_service.clone(),
     )
@ -107,11 +109,11 @@ async fn single_file_wrong_blob_size(
     // Test with a root FileNode of a too small size
     let e = write_nar(
         sink(),
-        &Node::File(FileNode::new(
-            HELLOWORLD_BLOB_DIGEST.clone(),
-            2, // <- note the wrong size here!
-            false,
-        )),
+        &Node::File {
+            digest: HELLOWORLD_BLOB_DIGEST.clone(),
+            size: 2, // <- note the wrong size here!
+            executable: false,
+        },
         blob_service,
         directory_service,
     )
@ -147,11 +149,11 @@ async fn single_file(
     write_nar(
         &mut buf,
-        &Node::File(FileNode::new(
-            HELLOWORLD_BLOB_DIGEST.clone(),
-            HELLOWORLD_BLOB_CONTENTS.len() as u64,
-            false,
-        )),
+        &Node::File {
+            digest: HELLOWORLD_BLOB_DIGEST.clone(),
+            size: HELLOWORLD_BLOB_CONTENTS.len() as u64,
+            executable: false,
+        },
         blob_service,
         directory_service,
     )
@ -189,10 +191,10 @@ async fn test_complicated(
     write_nar(
         &mut buf,
-        &Node::Directory(DirectoryNode::new(
-            DIRECTORY_COMPLICATED.digest(),
-            DIRECTORY_COMPLICATED.size(),
-        )),
+        &Node::Directory {
+            digest: DIRECTORY_COMPLICATED.digest(),
+            size: DIRECTORY_COMPLICATED.size(),
+        },
         blob_service.clone(),
         directory_service.clone(),
     )
@ -203,10 +205,10 @@ async fn test_complicated(
     // ensure calculate_nar does return the correct sha256 digest and sum.
     let (nar_size, nar_digest) = calculate_size_and_sha256(
-        &Node::Directory(DirectoryNode::new(
-            DIRECTORY_COMPLICATED.digest(),
-            DIRECTORY_COMPLICATED.size(),
-        )),
+        &Node::Directory {
+            digest: DIRECTORY_COMPLICATED.digest(),
+            size: DIRECTORY_COMPLICATED.size(),
+        },
         blob_service,
         directory_service,
     )