refactor(tvix/nar-bridge): have Export return root node
… and nar size / sha256 digest. Instead of producing sparse PathInfo messages when NARs are sent to nar-bridge, the nar-bridge http server now keeps a lookup table (narsha256) -> (rootNode, narSize). This removes a whole bunch of noise, because we don't need to keep sparse fields around. A convenience function `GenPathInfo(rootNode *castorev1pb.Node, narInfo *narinfo.NarInfo)` is added, which is used to produce PathInfo messages, either when receiving a NAR file over http and uploading it to a remote PathInfoService, or to synthesize the PathInfo message to return to the client, if nar-bridge is acting as a PathInfoService for a remote Nix HTTP Binary cache. Change-Id: Ibba1ab6238a050816c4fab29cb21ae88877d8613 Reviewed-on: https://cl.tvl.fyi/c/depot/+/9651 Tested-by: BuildkiteCI Reviewed-by: Brian McGee <brian@bmcgee.ie>
This commit is contained in:
parent
ceb1674e9f
commit
98c17147c6
10 changed files with 211 additions and 297 deletions
62
tvix/nar-bridge/pkg/importer/gen_pathinfo.go
Normal file
62
tvix/nar-bridge/pkg/importer/gen_pathinfo.go
Normal file
|
|
@ -0,0 +1,62 @@
|
|||
package importer

import (
	"fmt"

	castorev1pb "code.tvl.fyi/tvix/castore/protos"
	storev1pb "code.tvl.fyi/tvix/store/protos"
	"github.com/nix-community/go-nix/pkg/narinfo"
	"github.com/nix-community/go-nix/pkg/storepath"
)

// GenPathInfo takes a rootNode and narInfo and assembles a PathInfo.
// The rootNode is renamed to match the StorePath in the narInfo.
// It returns an error if the narinfo's StorePath or any of its reference
// names fail to parse as store paths, if the narinfo carries no FileHash,
// or if the assembled PathInfo fails validation.
func GenPathInfo(rootNode *castorev1pb.Node, narInfo *narinfo.NarInfo) (*storev1pb.PathInfo, error) {
	// parse the storePath from the .narinfo
	storePath, err := storepath.FromAbsolutePath(narInfo.StorePath)
	if err != nil {
		return nil, fmt.Errorf("unable to parse StorePath: %w", err)
	}

	// construct the references, by parsing ReferenceNames and extracting the digest
	references := make([][]byte, len(narInfo.References))
	for i, referenceStr := range narInfo.References {
		// parse reference as store path
		referenceStorePath, err := storepath.FromString(referenceStr)
		if err != nil {
			return nil, fmt.Errorf("unable to parse reference %s as storepath: %w", referenceStr, err)
		}
		references[i] = referenceStorePath.Digest
	}

	// convert narInfo.Signatures[*] into their protobuf representation
	narinfoSignatures := make([]*storev1pb.NARInfo_Signature, len(narInfo.Signatures))
	for i, narinfoSig := range narInfo.Signatures {
		narinfoSignatures[i] = &storev1pb.NARInfo_Signature{
			Name: narinfoSig.Name,
			Data: narinfoSig.Data,
		}
	}

	// FileHash is dereferenced below; a narinfo without a FileHash line
	// would otherwise cause a nil-pointer panic.
	if narInfo.FileHash == nil {
		return nil, fmt.Errorf("narinfo for %s lacks a FileHash", narInfo.StorePath)
	}

	// assemble the PathInfo.
	pathInfo := &storev1pb.PathInfo{
		// embed a new root node with the name set to the store path basename.
		Node:       castorev1pb.RenamedNode(rootNode, storePath.String()),
		References: references,
		Narinfo: &storev1pb.NARInfo{
			NarSize: narInfo.NarSize,
			// NOTE(review): this populates NarSha256 from FileHash, which
			// only equals the sha256 of the (uncompressed) NAR when the
			// narinfo has `Compression: none` — confirm callers guarantee
			// that; otherwise NarHash.Digest() would be the correct source.
			NarSha256:      narInfo.FileHash.Digest(),
			Signatures:     narinfoSignatures,
			ReferenceNames: narInfo.References,
		},
	}

	// Run Validate on the PathInfo as an additional sanity check that the
	// assembly above is sound, before returning it. A failure here is a bug
	// in this code, not bad input, but we surface it as a wrapped error
	// rather than panicking, so library consumers stay in control.
	if _, err := pathInfo.Validate(); err != nil {
		return nil, fmt.Errorf("PathInfo failed validation: %w", err)
	}

	return pathInfo, nil
}
|
||||
|
|
@ -10,7 +10,6 @@ import (
|
|||
"strings"
|
||||
|
||||
castorev1pb "code.tvl.fyi/tvix/castore/protos"
|
||||
storev1pb "code.tvl.fyi/tvix/store/protos"
|
||||
"github.com/nix-community/go-nix/pkg/nar"
|
||||
)
|
||||
|
||||
|
|
@ -20,8 +19,8 @@ type stackItem struct {
|
|||
directory *castorev1pb.Directory
|
||||
}
|
||||
|
||||
// Import reads NAR from a reader, and returns a (sparsely populated) PathInfo
|
||||
// object.
|
||||
// Import reads a NAR from a reader, and returns the root node,
|
||||
// NAR size and NAR sha256 digest.
|
||||
func Import(
|
||||
// a context, to support cancellation
|
||||
ctx context.Context,
|
||||
|
|
@ -31,7 +30,7 @@ func Import(
|
|||
blobCb func(fileReader io.Reader) ([]byte, error),
|
||||
// callback function called with each finalized directory node
|
||||
directoryCb func(directory *castorev1pb.Directory) ([]byte, error),
|
||||
) (*storev1pb.PathInfo, error) {
|
||||
) (*castorev1pb.Node, uint64, []byte, error) {
|
||||
// We need to wrap the underlying reader a bit.
|
||||
// - we want to keep track of the number of bytes read in total
|
||||
// - we calculate the sha256 digest over all data read
|
||||
|
|
@ -42,7 +41,7 @@ func Import(
|
|||
multiW := io.MultiWriter(narCountW, sha256W)
|
||||
narReader, err := nar.NewReader(io.TeeReader(r, multiW))
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to instantiate nar reader: %w", err)
|
||||
return nil, 0, nil, fmt.Errorf("failed to instantiate nar reader: %w", err)
|
||||
}
|
||||
defer narReader.Close()
|
||||
|
||||
|
|
@ -98,7 +97,7 @@ func Import(
|
|||
for {
|
||||
select {
|
||||
case <-ctx.Done():
|
||||
return nil, ctx.Err()
|
||||
return nil, 0, nil, ctx.Err()
|
||||
default:
|
||||
// call narReader.Next() to get the next element
|
||||
hdr, err := narReader.Next()
|
||||
|
|
@ -108,60 +107,49 @@ func Import(
|
|||
if err != nil {
|
||||
// if this returns no EOF, bail out
|
||||
if !errors.Is(err, io.EOF) {
|
||||
return nil, fmt.Errorf("failed getting next nar element: %w", err)
|
||||
return nil, 0, nil, fmt.Errorf("failed getting next nar element: %w", err)
|
||||
}
|
||||
|
||||
// The NAR has been read all the way to the end…
|
||||
// Make sure we close the nar reader, which might read some final trailers.
|
||||
if err := narReader.Close(); err != nil {
|
||||
return nil, fmt.Errorf("unable to close nar reader: %w", err)
|
||||
return nil, 0, nil, fmt.Errorf("unable to close nar reader: %w", err)
|
||||
}
|
||||
|
||||
// Check the stack. While it's not empty, we need to pop things off the stack.
|
||||
for len(stack) > 0 {
|
||||
err := popFromStack()
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("unable to pop from stack: %w", err)
|
||||
return nil, 0, nil, fmt.Errorf("unable to pop from stack: %w", err)
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
// Stack is empty. We now either have a regular or symlink root node,
|
||||
// or we encountered at least one directory assemble pathInfo with these and
|
||||
// return.
|
||||
pi := &storev1pb.PathInfo{
|
||||
Node: nil,
|
||||
References: [][]byte{},
|
||||
Narinfo: &storev1pb.NARInfo{
|
||||
NarSize: narCountW.BytesWritten(),
|
||||
NarSha256: sha256W.Sum(nil),
|
||||
Signatures: []*storev1pb.NARInfo_Signature{},
|
||||
ReferenceNames: []string{},
|
||||
},
|
||||
}
|
||||
// Stack is empty.
|
||||
// Now either root{File,Symlink,Directory} is not nil,
|
||||
// and we can return the root node.
|
||||
narSize := narCountW.BytesWritten()
|
||||
narSha256 := sha256W.Sum(nil)
|
||||
|
||||
if rootFile != nil {
|
||||
pi.Node = &castorev1pb.Node{
|
||||
return &castorev1pb.Node{
|
||||
Node: &castorev1pb.Node_File{
|
||||
File: rootFile,
|
||||
},
|
||||
}
|
||||
}
|
||||
if rootSymlink != nil {
|
||||
pi.Node = &castorev1pb.Node{
|
||||
}, narSize, narSha256, nil
|
||||
} else if rootSymlink != nil {
|
||||
return &castorev1pb.Node{
|
||||
Node: &castorev1pb.Node_Symlink{
|
||||
Symlink: rootSymlink,
|
||||
},
|
||||
}
|
||||
}
|
||||
if stackDirectory != nil {
|
||||
}, narSize, narSha256, nil
|
||||
} else if stackDirectory != nil {
|
||||
// calculate directory digest (i.e. after we received all its contents)
|
||||
dgst, err := stackDirectory.Digest()
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("unable to calculate root directory digest: %w", err)
|
||||
return nil, 0, nil, fmt.Errorf("unable to calculate root directory digest: %w", err)
|
||||
}
|
||||
|
||||
pi.Node = &castorev1pb.Node{
|
||||
return &castorev1pb.Node{
|
||||
Node: &castorev1pb.Node_Directory{
|
||||
Directory: &castorev1pb.DirectoryNode{
|
||||
Name: []byte{},
|
||||
|
|
@ -169,9 +157,10 @@ func Import(
|
|||
Size: stackDirectory.Size(),
|
||||
},
|
||||
},
|
||||
}
|
||||
}, narSize, narSha256, nil
|
||||
} else {
|
||||
return nil, 0, nil, fmt.Errorf("no root set")
|
||||
}
|
||||
return pi, nil
|
||||
}
|
||||
|
||||
// Check for valid path transitions, pop from stack if needed
|
||||
|
|
@ -185,7 +174,7 @@ func Import(
|
|||
for len(stack) > 1 && !strings.HasPrefix(hdr.Path, stack[len(stack)-1].path+"/") {
|
||||
err := popFromStack()
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("unable to pop from stack: %w", err)
|
||||
return nil, 0, nil, fmt.Errorf("unable to pop from stack: %w", err)
|
||||
}
|
||||
}
|
||||
|
||||
|
|
@ -209,7 +198,7 @@ func Import(
|
|||
|
||||
blobDigest, err := blobCb(blobReader)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failure from blobCb: %w", err)
|
||||
return nil, 0, nil, fmt.Errorf("failure from blobCb: %w", err)
|
||||
}
|
||||
|
||||
// ensure blobCb did read all the way to the end.
|
||||
|
|
|
|||
|
|
@ -10,7 +10,6 @@ import (
|
|||
|
||||
castorev1pb "code.tvl.fyi/tvix/castore/protos"
|
||||
"code.tvl.fyi/tvix/nar-bridge/pkg/importer"
|
||||
storev1pb "code.tvl.fyi/tvix/store/protos"
|
||||
"github.com/stretchr/testify/require"
|
||||
)
|
||||
|
||||
|
|
@ -18,7 +17,7 @@ func TestSymlink(t *testing.T) {
|
|||
f, err := os.Open("../../testdata/symlink.nar")
|
||||
require.NoError(t, err)
|
||||
|
||||
actualPathInfo, err := importer.Import(
|
||||
rootNode, narSize, narSha256, err := importer.Import(
|
||||
context.Background(),
|
||||
f,
|
||||
func(blobReader io.Reader) ([]byte, error) {
|
||||
|
|
@ -28,35 +27,25 @@ func TestSymlink(t *testing.T) {
|
|||
},
|
||||
)
|
||||
require.NoError(t, err)
|
||||
|
||||
expectedPathInfo := &storev1pb.PathInfo{
|
||||
Node: &castorev1pb.Node{
|
||||
Node: &castorev1pb.Node_Symlink{
|
||||
Symlink: &castorev1pb.SymlinkNode{
|
||||
Name: []byte(""),
|
||||
Target: []byte("/nix/store/somewhereelse"),
|
||||
},
|
||||
require.Equal(t, &castorev1pb.Node{
|
||||
Node: &castorev1pb.Node_Symlink{
|
||||
Symlink: &castorev1pb.SymlinkNode{
|
||||
Name: []byte(""),
|
||||
Target: []byte("/nix/store/somewhereelse"),
|
||||
},
|
||||
},
|
||||
References: [][]byte{},
|
||||
Narinfo: &storev1pb.NARInfo{
|
||||
NarSize: 136,
|
||||
NarSha256: []byte{
|
||||
0x09, 0x7d, 0x39, 0x7e, 0x9b, 0x58, 0x26, 0x38, 0x4e, 0xaa, 0x16, 0xc4, 0x57, 0x71, 0x5d, 0x1c, 0x1a, 0x51, 0x67, 0x03, 0x13, 0xea, 0xd0, 0xf5, 0x85, 0x66, 0xe0, 0xb2, 0x32, 0x53, 0x9c, 0xf1,
|
||||
},
|
||||
Signatures: []*storev1pb.NARInfo_Signature{},
|
||||
ReferenceNames: []string{},
|
||||
},
|
||||
}
|
||||
|
||||
requireProtoEq(t, expectedPathInfo, actualPathInfo)
|
||||
}, rootNode)
|
||||
require.Equal(t, []byte{
|
||||
0x09, 0x7d, 0x39, 0x7e, 0x9b, 0x58, 0x26, 0x38, 0x4e, 0xaa, 0x16, 0xc4, 0x57, 0x71, 0x5d, 0x1c, 0x1a, 0x51, 0x67, 0x03, 0x13, 0xea, 0xd0, 0xf5, 0x85, 0x66, 0xe0, 0xb2, 0x32, 0x53, 0x9c, 0xf1,
|
||||
}, narSha256)
|
||||
require.Equal(t, uint64(136), narSize)
|
||||
}
|
||||
|
||||
func TestRegular(t *testing.T) {
|
||||
f, err := os.Open("../../testdata/onebyteregular.nar")
|
||||
require.NoError(t, err)
|
||||
|
||||
actualPathInfo, err := importer.Import(
|
||||
rootNode, narSize, narSha256, err := importer.Import(
|
||||
context.Background(),
|
||||
f,
|
||||
func(blobReader io.Reader) ([]byte, error) {
|
||||
|
|
@ -68,7 +57,6 @@ func TestRegular(t *testing.T) {
|
|||
panic("no directories expected!")
|
||||
},
|
||||
)
|
||||
require.NoError(t, err)
|
||||
|
||||
// The blake3 digest of the 0x01 byte.
|
||||
BLAKE3_DIGEST_0X01 := []byte{
|
||||
|
|
@ -77,29 +65,21 @@ func TestRegular(t *testing.T) {
|
|||
0x65, 0x2b,
|
||||
}
|
||||
|
||||
expectedPathInfo := &storev1pb.PathInfo{
|
||||
Node: &castorev1pb.Node{
|
||||
Node: &castorev1pb.Node_File{
|
||||
File: &castorev1pb.FileNode{
|
||||
Name: []byte(""),
|
||||
Digest: BLAKE3_DIGEST_0X01,
|
||||
Size: 1,
|
||||
Executable: false,
|
||||
},
|
||||
require.NoError(t, err)
|
||||
require.Equal(t, &castorev1pb.Node{
|
||||
Node: &castorev1pb.Node_File{
|
||||
File: &castorev1pb.FileNode{
|
||||
Name: []byte(""),
|
||||
Digest: BLAKE3_DIGEST_0X01,
|
||||
Size: 1,
|
||||
Executable: false,
|
||||
},
|
||||
},
|
||||
References: [][]byte{},
|
||||
Narinfo: &storev1pb.NARInfo{
|
||||
NarSize: 120,
|
||||
NarSha256: []byte{
|
||||
0x73, 0x08, 0x50, 0xa8, 0x11, 0x25, 0x9d, 0xbf, 0x3a, 0x68, 0xdc, 0x2e, 0xe8, 0x7a, 0x79, 0xaa, 0x6c, 0xae, 0x9f, 0x71, 0x37, 0x5e, 0xdf, 0x39, 0x6f, 0x9d, 0x7a, 0x91, 0xfb, 0xe9, 0x13, 0x4d,
|
||||
},
|
||||
Signatures: []*storev1pb.NARInfo_Signature{},
|
||||
ReferenceNames: []string{},
|
||||
},
|
||||
}
|
||||
|
||||
requireProtoEq(t, expectedPathInfo, actualPathInfo)
|
||||
}, rootNode)
|
||||
require.Equal(t, []byte{
|
||||
0x73, 0x08, 0x50, 0xa8, 0x11, 0x25, 0x9d, 0xbf, 0x3a, 0x68, 0xdc, 0x2e, 0xe8, 0x7a, 0x79, 0xaa, 0x6c, 0xae, 0x9f, 0x71, 0x37, 0x5e, 0xdf, 0x39, 0x6f, 0x9d, 0x7a, 0x91, 0xfb, 0xe9, 0x13, 0x4d,
|
||||
}, narSha256)
|
||||
require.Equal(t, uint64(120), narSize)
|
||||
}
|
||||
|
||||
func TestEmptyDirectory(t *testing.T) {
|
||||
|
|
@ -111,7 +91,7 @@ func TestEmptyDirectory(t *testing.T) {
|
|||
Files: []*castorev1pb.FileNode{},
|
||||
Symlinks: []*castorev1pb.SymlinkNode{},
|
||||
}
|
||||
actualPathInfo, err := importer.Import(
|
||||
rootNode, narSize, narSha256, err := importer.Import(
|
||||
context.Background(),
|
||||
f,
|
||||
func(blobReader io.Reader) ([]byte, error) {
|
||||
|
|
@ -122,28 +102,19 @@ func TestEmptyDirectory(t *testing.T) {
|
|||
},
|
||||
)
|
||||
require.NoError(t, err)
|
||||
|
||||
expectedPathInfo := &storev1pb.PathInfo{
|
||||
Node: &castorev1pb.Node{
|
||||
Node: &castorev1pb.Node_Directory{
|
||||
Directory: &castorev1pb.DirectoryNode{
|
||||
Name: []byte(""),
|
||||
Digest: mustDirectoryDigest(expectedDirectory),
|
||||
Size: expectedDirectory.Size(),
|
||||
},
|
||||
require.Equal(t, &castorev1pb.Node{
|
||||
Node: &castorev1pb.Node_Directory{
|
||||
Directory: &castorev1pb.DirectoryNode{
|
||||
Name: []byte(""),
|
||||
Digest: mustDirectoryDigest(expectedDirectory),
|
||||
Size: expectedDirectory.Size(),
|
||||
},
|
||||
},
|
||||
References: [][]byte{},
|
||||
Narinfo: &storev1pb.NARInfo{
|
||||
NarSize: 96,
|
||||
NarSha256: []byte{
|
||||
0xa5, 0x0a, 0x5a, 0xb6, 0xd9, 0x92, 0xf5, 0x59, 0x8e, 0xdd, 0x92, 0x10, 0x50, 0x59, 0xfa, 0xe9, 0xac, 0xfc, 0x19, 0x29, 0x81, 0xe0, 0x8b, 0xd8, 0x85, 0x34, 0xc2, 0x16, 0x7e, 0x92, 0x52, 0x6a,
|
||||
},
|
||||
Signatures: []*storev1pb.NARInfo_Signature{},
|
||||
ReferenceNames: []string{},
|
||||
},
|
||||
}
|
||||
requireProtoEq(t, expectedPathInfo, actualPathInfo)
|
||||
}, rootNode)
|
||||
require.Equal(t, []byte{
|
||||
0xa5, 0x0a, 0x5a, 0xb6, 0xd9, 0x92, 0xf5, 0x59, 0x8e, 0xdd, 0x92, 0x10, 0x50, 0x59, 0xfa, 0xe9, 0xac, 0xfc, 0x19, 0x29, 0x81, 0xe0, 0x8b, 0xd8, 0x85, 0x34, 0xc2, 0x16, 0x7e, 0x92, 0x52, 0x6a,
|
||||
}, narSha256)
|
||||
require.Equal(t, uint64(96), narSize)
|
||||
}
|
||||
|
||||
func TestFull(t *testing.T) {
|
||||
|
|
@ -458,7 +429,7 @@ func TestFull(t *testing.T) {
|
|||
|
||||
numDirectoriesReceived := 0
|
||||
|
||||
actualPathInfo, err := importer.Import(
|
||||
rootNode, narSize, narSha256, err := importer.Import(
|
||||
context.Background(),
|
||||
f,
|
||||
func(blobReader io.Reader) ([]byte, error) {
|
||||
|
|
@ -480,28 +451,19 @@ func TestFull(t *testing.T) {
|
|||
},
|
||||
)
|
||||
require.NoError(t, err)
|
||||
|
||||
expectedPathInfo := &storev1pb.PathInfo{
|
||||
Node: &castorev1pb.Node{
|
||||
Node: &castorev1pb.Node_Directory{
|
||||
Directory: &castorev1pb.DirectoryNode{
|
||||
Name: []byte(""),
|
||||
Digest: mustDirectoryDigest(expectedDirectories["/"]),
|
||||
Size: expectedDirectories["/"].Size(),
|
||||
},
|
||||
require.Equal(t, &castorev1pb.Node{
|
||||
Node: &castorev1pb.Node_Directory{
|
||||
Directory: &castorev1pb.DirectoryNode{
|
||||
Name: []byte(""),
|
||||
Digest: mustDirectoryDigest(expectedDirectories["/"]),
|
||||
Size: expectedDirectories["/"].Size(),
|
||||
},
|
||||
},
|
||||
References: [][]byte{},
|
||||
Narinfo: &storev1pb.NARInfo{
|
||||
NarSize: 464152,
|
||||
NarSha256: []byte{
|
||||
0xc6, 0xe1, 0x55, 0xb3, 0x45, 0x6e, 0x30, 0xb7, 0x61, 0x22, 0x63, 0xec, 0x09, 0x50, 0x70, 0x81, 0x1c, 0xaf, 0x8a, 0xbf, 0xd5, 0x9f, 0xaa, 0x72, 0xab, 0x82, 0xa5, 0x92, 0xef, 0xde, 0xb2, 0x53,
|
||||
},
|
||||
Signatures: []*storev1pb.NARInfo_Signature{},
|
||||
ReferenceNames: []string{},
|
||||
},
|
||||
}
|
||||
requireProtoEq(t, expectedPathInfo, actualPathInfo)
|
||||
}, rootNode)
|
||||
require.Equal(t, []byte{
|
||||
0xc6, 0xe1, 0x55, 0xb3, 0x45, 0x6e, 0x30, 0xb7, 0x61, 0x22, 0x63, 0xec, 0x09, 0x50, 0x70, 0x81, 0x1c, 0xaf, 0x8a, 0xbf, 0xd5, 0x9f, 0xaa, 0x72, 0xab, 0x82, 0xa5, 0x92, 0xef, 0xde, 0xb2, 0x53,
|
||||
}, narSha256)
|
||||
require.Equal(t, uint64(464152), narSize)
|
||||
}
|
||||
|
||||
// TestCallbackErrors ensures that errors returned from the callback function
|
||||
|
|
@ -514,7 +476,7 @@ func TestCallbackErrors(t *testing.T) {
|
|||
|
||||
targetErr := errors.New("expected error")
|
||||
|
||||
_, err = importer.Import(
|
||||
_, _, _, err = importer.Import(
|
||||
context.Background(),
|
||||
f,
|
||||
func(blobReader io.Reader) ([]byte, error) {
|
||||
|
|
@ -532,7 +494,7 @@ func TestCallbackErrors(t *testing.T) {
|
|||
|
||||
targetErr := errors.New("expected error")
|
||||
|
||||
_, err = importer.Import(
|
||||
_, _, _, err = importer.Import(
|
||||
context.Background(),
|
||||
f,
|
||||
func(blobReader io.Reader) ([]byte, error) {
|
||||
|
|
@ -562,7 +524,7 @@ func TestPopDirectories(t *testing.T) {
|
|||
require.NoError(t, err)
|
||||
defer f.Close()
|
||||
|
||||
_, err = importer.Import(
|
||||
_, _, _, err = importer.Import(
|
||||
context.Background(),
|
||||
f,
|
||||
func(blobReader io.Reader) ([]byte, error) { return mustBlobDigest(blobReader), nil },
|
||||
|
|
|
|||
|
|
@ -30,7 +30,7 @@ func TestRoundtrip(t *testing.T) {
|
|||
blobsMap := make(map[string][]byte, 0)
|
||||
directoriesMap := make(map[string]*castorev1pb.Directory)
|
||||
|
||||
pathInfo, err := importer.Import(
|
||||
rootNode, _, _, err := importer.Import(
|
||||
context.Background(),
|
||||
bytes.NewBuffer(narContents),
|
||||
func(blobReader io.Reader) ([]byte, error) {
|
||||
|
|
@ -56,10 +56,10 @@ func TestRoundtrip(t *testing.T) {
|
|||
require.NoError(t, err)
|
||||
|
||||
// done populating everything, now actually test the export :-)
|
||||
var buf bytes.Buffer
|
||||
var narBuf bytes.Buffer
|
||||
err = storev1pb.Export(
|
||||
&buf,
|
||||
pathInfo.Node,
|
||||
&narBuf,
|
||||
rootNode,
|
||||
func(directoryDgst []byte) (*castorev1pb.Directory, error) {
|
||||
d, found := directoriesMap[base64.StdEncoding.EncodeToString(directoryDgst)]
|
||||
if !found {
|
||||
|
|
@ -77,5 +77,5 @@ func TestRoundtrip(t *testing.T) {
|
|||
)
|
||||
|
||||
require.NoError(t, err, "exporter shouldn't fail")
|
||||
require.Equal(t, narContents, buf.Bytes())
|
||||
require.Equal(t, narContents, narBuf.Bytes())
|
||||
}
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue