From 1838ea18abf597620318c0c6e4d28e10854f8ee1 Mon Sep 17 00:00:00 2001 From: Vi Date: Thu, 5 Feb 2026 20:30:23 +0000 Subject: [PATCH] feat(io): add Node in-memory filesystem (port from Borg DataNode) (#343) (#352) Co-authored-by: Claude Co-authored-by: Claude Opus 4.6 --- pkg/io/node/node.go | 382 +++++++++++++++++++++++++++ pkg/io/node/node_test.go | 543 +++++++++++++++++++++++++++++++++++++++ 2 files changed, 925 insertions(+) create mode 100644 pkg/io/node/node.go create mode 100644 pkg/io/node/node_test.go diff --git a/pkg/io/node/node.go b/pkg/io/node/node.go new file mode 100644 index 00000000..a213cb28 --- /dev/null +++ b/pkg/io/node/node.go @@ -0,0 +1,382 @@ +package node + +import ( + "archive/tar" + "bytes" + "io" + "io/fs" + "os" + "path" + "sort" + "strings" + "time" +) + +// Node is an in-memory filesystem that implements fs.FS, fs.StatFS, +// and fs.ReadFileFS. It stores files as byte slices keyed by their +// path, with directories being implicit based on path prefixes. +// +// Ported from github.com/Snider/Borg/pkg/datanode. +type Node struct { + files map[string]*nodeFile +} + +// Compile-time interface checks. +var ( + _ fs.FS = (*Node)(nil) + _ fs.StatFS = (*Node)(nil) + _ fs.ReadFileFS = (*Node)(nil) +) + +// New creates a new, empty Node. +func New() *Node { + return &Node{files: make(map[string]*nodeFile)} +} + +// FromTar creates a new Node from a tarball. +func FromTar(tarball []byte) (*Node, error) { + n := New() + tarReader := tar.NewReader(bytes.NewReader(tarball)) + + for { + header, err := tarReader.Next() + if err == io.EOF { + break + } + if err != nil { + return nil, err + } + + if header.Typeflag == tar.TypeReg { + data, err := io.ReadAll(tarReader) + if err != nil { + return nil, err + } + n.AddData(header.Name, data) + } + } + + return n, nil +} + +// ToTar serializes the Node to a tarball. +func (n *Node) ToTar() ([]byte, error) { + buf := new(bytes.Buffer) + tw := tar.NewWriter(buf) + + for _, file := range n.files { + hdr := &tar.Header{ + Name: file.name, + Mode: 0600, + Size: int64(len(file.content)), + ModTime: file.modTime, + } + if err := tw.WriteHeader(hdr); err != nil { + return nil, err + } + if _, err := tw.Write(file.content); err != nil { + return nil, err + } + } + + if err := tw.Close(); err != nil { + return nil, err + } + + return buf.Bytes(), nil +} + +// AddData adds a file to the Node. If a file with the same name +// already exists it is overwritten. Directory entries (names ending +// in "/") and empty names are silently ignored. +func (n *Node) AddData(name string, content []byte) { + name = strings.TrimPrefix(name, "/") + if name == "" { + return + } + // Directories are implicit, so we don't store them. + // A name ending in "/" is treated as a directory. + if strings.HasSuffix(name, "/") { + return + } + n.files[name] = &nodeFile{ + name: name, + content: content, + modTime: time.Now(), + } +} + +// Open opens a file from the Node, satisfying the fs.FS interface. +func (n *Node) Open(name string) (fs.File, error) { + name = strings.TrimPrefix(name, "/") + if file, ok := n.files[name]; ok { + return &nodeFileReader{file: file}, nil + } + // Check if it's a directory. + prefix := name + "/" + if name == "." || name == "" { + prefix = "" + } + for p := range n.files { + if strings.HasPrefix(p, prefix) { + return &dirFile{path: name, modTime: time.Now()}, nil + } + } + return nil, fs.ErrNotExist +} + +// Stat returns the FileInfo for the named file, satisfying the +// fs.StatFS interface. 
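+//
+// A minimal sketch of typical use (the path below is illustrative only and
+// assumes the Node was populated via AddData):
+//
+//	info, err := n.Stat("configs/app.yaml")
+//	if err == nil && !info.IsDir() {
+//		fmt.Println(info.Size()) // size of the stored content
+//	}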
+func (n *Node) Stat(name string) (fs.FileInfo, error) { + name = strings.TrimPrefix(name, "/") + if file, ok := n.files[name]; ok { + return file.Stat() + } + // Check if it's a directory. + prefix := name + "/" + if name == "." || name == "" { + prefix = "" + } + for p := range n.files { + if strings.HasPrefix(p, prefix) { + return &dirInfo{name: path.Base(name), modTime: time.Now()}, nil + } + } + return nil, fs.ErrNotExist +} + +// ReadFile reads the named file and returns its contents, satisfying +// the fs.ReadFileFS interface. +func (n *Node) ReadFile(name string) ([]byte, error) { + name = strings.TrimPrefix(name, "/") + if file, ok := n.files[name]; ok { + // Return a copy so callers cannot mutate the internal state. + out := make([]byte, len(file.content)) + copy(out, file.content) + return out, nil + } + return nil, fs.ErrNotExist +} + +// ReadDir reads and returns all directory entries for the named directory. +func (n *Node) ReadDir(name string) ([]fs.DirEntry, error) { + name = strings.TrimPrefix(name, "/") + if name == "." { + name = "" + } + + // Disallow reading a file as a directory. + if info, err := n.Stat(name); err == nil && !info.IsDir() { + return nil, &fs.PathError{Op: "readdir", Path: name, Err: fs.ErrInvalid} + } + + entries := []fs.DirEntry{} + seen := make(map[string]bool) + + prefix := "" + if name != "" { + prefix = name + "/" + } + + for p := range n.files { + if !strings.HasPrefix(p, prefix) { + continue + } + + relPath := strings.TrimPrefix(p, prefix) + firstComponent := strings.Split(relPath, "/")[0] + + if seen[firstComponent] { + continue + } + seen[firstComponent] = true + + if strings.Contains(relPath, "/") { + // It's a directory. + dir := &dirInfo{name: firstComponent, modTime: time.Now()} + entries = append(entries, fs.FileInfoToDirEntry(dir)) + } else { + // It's a file. + file := n.files[p] + info, _ := file.Stat() + entries = append(entries, fs.FileInfoToDirEntry(info)) + } + } + + // Sort for stable order. + sort.Slice(entries, func(i, j int) bool { + return entries[i].Name() < entries[j].Name() + }) + + return entries, nil +} + +// Exists returns true if the file or directory exists in the Node. +func (n *Node) Exists(name string, opts ...ExistsOptions) (bool, error) { + info, err := n.Stat(name) + if err != nil { + if err == fs.ErrNotExist || os.IsNotExist(err) { + return false, nil + } + return false, err + } + if len(opts) > 0 { + if opts[0].WantType == fs.ModeDir && !info.IsDir() { + return false, nil + } + if opts[0].WantType != fs.ModeDir && info.IsDir() { + return false, nil + } + } + return true, nil +} + +// ExistsOptions allows customizing the Exists check. +type ExistsOptions struct { + WantType fs.FileMode +} + +// WalkOptions allows customizing the Walk behavior. +type WalkOptions struct { + MaxDepth int + Filter func(path string, d fs.DirEntry) bool + SkipErrors bool +} + +// Walk recursively descends the file tree rooted at root, calling fn +// for each file or directory in the tree. 
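+//
+// A minimal sketch of walking with options (the depth limit is an
+// illustrative value, not a default):
+//
+//	err := n.Walk(".", func(p string, d fs.DirEntry, err error) error {
+//		if err != nil {
+//			return err
+//		}
+//		fmt.Println(p)
+//		return nil
+//	}, WalkOptions{MaxDepth: 2})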
+func (n *Node) Walk(root string, fn fs.WalkDirFunc, opts ...WalkOptions) error { + var maxDepth int + var filter func(string, fs.DirEntry) bool + var skipErrors bool + if len(opts) > 0 { + maxDepth = opts[0].MaxDepth + filter = opts[0].Filter + skipErrors = opts[0].SkipErrors + } + + return fs.WalkDir(n, root, func(p string, de fs.DirEntry, err error) error { + if err != nil { + if skipErrors { + return nil + } + return fn(p, de, err) + } + if filter != nil && !filter(p, de) { + if de.IsDir() { + return fs.SkipDir + } + return nil + } + + // Process the entry first. + if err := fn(p, de, nil); err != nil { + return err + } + + if maxDepth > 0 { + // Calculate depth relative to root. + cleanedPath := strings.TrimPrefix(p, root) + cleanedPath = strings.TrimPrefix(cleanedPath, "/") + + currentDepth := 0 + if p != root { + if cleanedPath == "" { + currentDepth = 0 + } else { + currentDepth = strings.Count(cleanedPath, "/") + 1 + } + } + + if de.IsDir() && currentDepth >= maxDepth { + return fs.SkipDir + } + } + return nil + }) +} + +// CopyFile copies a file from the Node to the local filesystem. +func (n *Node) CopyFile(sourcePath string, target string, perm os.FileMode) error { + sourceFile, err := n.Open(sourcePath) + if err != nil { + return err + } + defer sourceFile.Close() + + targetFile, err := os.OpenFile(target, os.O_CREATE|os.O_RDWR, perm) + if err != nil { + return err + } + defer targetFile.Close() + + _, err = io.Copy(targetFile, sourceFile) + return err +} + +// --------------------------------------------------------------------------- +// Internal types +// --------------------------------------------------------------------------- + +// nodeFile represents a file stored in the Node. +type nodeFile struct { + name string + content []byte + modTime time.Time +} + +func (f *nodeFile) Stat() (fs.FileInfo, error) { return &nodeFileInfo{file: f}, nil } +func (f *nodeFile) Read([]byte) (int, error) { return 0, io.EOF } +func (f *nodeFile) Close() error { return nil } + +// nodeFileInfo implements fs.FileInfo for a nodeFile. +type nodeFileInfo struct{ file *nodeFile } + +func (i *nodeFileInfo) Name() string { return path.Base(i.file.name) } +func (i *nodeFileInfo) Size() int64 { return int64(len(i.file.content)) } +func (i *nodeFileInfo) Mode() fs.FileMode { return 0444 } +func (i *nodeFileInfo) ModTime() time.Time { return i.file.modTime } +func (i *nodeFileInfo) IsDir() bool { return false } +func (i *nodeFileInfo) Sys() interface{} { return nil } + +// nodeFileReader implements fs.File for reading a nodeFile. +type nodeFileReader struct { + file *nodeFile + reader *bytes.Reader +} + +func (r *nodeFileReader) Stat() (fs.FileInfo, error) { return r.file.Stat() } +func (r *nodeFileReader) Read(p []byte) (int, error) { + if r.reader == nil { + r.reader = bytes.NewReader(r.file.content) + } + return r.reader.Read(p) +} +func (r *nodeFileReader) Close() error { return nil } + +// dirInfo implements fs.FileInfo for an implicit directory. +type dirInfo struct { + name string + modTime time.Time +} + +func (d *dirInfo) Name() string { return d.name } +func (d *dirInfo) Size() int64 { return 0 } +func (d *dirInfo) Mode() fs.FileMode { return fs.ModeDir | 0555 } +func (d *dirInfo) ModTime() time.Time { return d.modTime } +func (d *dirInfo) IsDir() bool { return true } +func (d *dirInfo) Sys() interface{} { return nil } + +// dirFile implements fs.File for a directory. 
+type dirFile struct { + path string + modTime time.Time +} + +func (d *dirFile) Stat() (fs.FileInfo, error) { + return &dirInfo{name: path.Base(d.path), modTime: d.modTime}, nil +} +func (d *dirFile) Read([]byte) (int, error) { + return 0, &fs.PathError{Op: "read", Path: d.path, Err: fs.ErrInvalid} +} +func (d *dirFile) Close() error { return nil } diff --git a/pkg/io/node/node_test.go b/pkg/io/node/node_test.go new file mode 100644 index 00000000..5ef1afab --- /dev/null +++ b/pkg/io/node/node_test.go @@ -0,0 +1,543 @@ +package node + +import ( + "archive/tar" + "bytes" + "errors" + "io" + "io/fs" + "os" + "path/filepath" + "sort" + "strings" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +// --------------------------------------------------------------------------- +// New +// --------------------------------------------------------------------------- + +func TestNew_Good(t *testing.T) { + n := New() + require.NotNil(t, n, "New() must not return nil") + assert.NotNil(t, n.files, "New() must initialize the files map") +} + +// --------------------------------------------------------------------------- +// AddData +// --------------------------------------------------------------------------- + +func TestAddData_Good(t *testing.T) { + n := New() + n.AddData("foo.txt", []byte("foo")) + + file, ok := n.files["foo.txt"] + require.True(t, ok, "file foo.txt should be present") + assert.Equal(t, []byte("foo"), file.content) + + info, err := file.Stat() + require.NoError(t, err) + assert.Equal(t, "foo.txt", info.Name()) +} + +func TestAddData_Bad(t *testing.T) { + n := New() + + // Empty name is silently ignored. + n.AddData("", []byte("data")) + assert.Empty(t, n.files, "empty name must not be stored") + + // Directory entry (trailing slash) is silently ignored. + n.AddData("dir/", nil) + assert.Empty(t, n.files, "directory entry must not be stored") +} + +func TestAddData_Ugly(t *testing.T) { + t.Run("Overwrite", func(t *testing.T) { + n := New() + n.AddData("foo.txt", []byte("foo")) + n.AddData("foo.txt", []byte("bar")) + + file := n.files["foo.txt"] + assert.Equal(t, []byte("bar"), file.content, "second AddData should overwrite") + }) + + t.Run("LeadingSlash", func(t *testing.T) { + n := New() + n.AddData("/hello.txt", []byte("hi")) + _, ok := n.files["hello.txt"] + assert.True(t, ok, "leading slash should be trimmed") + }) +} + +// --------------------------------------------------------------------------- +// Open +// --------------------------------------------------------------------------- + +func TestOpen_Good(t *testing.T) { + n := New() + n.AddData("foo.txt", []byte("foo")) + + file, err := n.Open("foo.txt") + require.NoError(t, err) + defer file.Close() + + buf := make([]byte, 10) + nr, err := file.Read(buf) + require.True(t, nr > 0 || err == io.EOF) + assert.Equal(t, "foo", string(buf[:nr])) +} + +func TestOpen_Bad(t *testing.T) { + n := New() + _, err := n.Open("nonexistent.txt") + require.Error(t, err) + assert.ErrorIs(t, err, fs.ErrNotExist) +} + +func TestOpen_Ugly(t *testing.T) { + n := New() + n.AddData("bar/baz.txt", []byte("baz")) + + // Opening a directory should succeed. + file, err := n.Open("bar") + require.NoError(t, err) + defer file.Close() + + // Reading from a directory should fail. 
+ _, err = file.Read(make([]byte, 1)) + require.Error(t, err) + + var pathErr *fs.PathError + require.True(t, errors.As(err, &pathErr)) + assert.Equal(t, fs.ErrInvalid, pathErr.Err) +} + +// --------------------------------------------------------------------------- +// Stat +// --------------------------------------------------------------------------- + +func TestStat_Good(t *testing.T) { + n := New() + n.AddData("foo.txt", []byte("foo")) + n.AddData("bar/baz.txt", []byte("baz")) + + // File stat. + info, err := n.Stat("bar/baz.txt") + require.NoError(t, err) + assert.Equal(t, "baz.txt", info.Name()) + assert.Equal(t, int64(3), info.Size()) + assert.False(t, info.IsDir()) + + // Directory stat. + dirInfo, err := n.Stat("bar") + require.NoError(t, err) + assert.True(t, dirInfo.IsDir()) + assert.Equal(t, "bar", dirInfo.Name()) +} + +func TestStat_Bad(t *testing.T) { + n := New() + _, err := n.Stat("nonexistent") + require.Error(t, err) + assert.ErrorIs(t, err, fs.ErrNotExist) +} + +func TestStat_Ugly(t *testing.T) { + n := New() + n.AddData("foo.txt", []byte("foo")) + + // Root directory. + info, err := n.Stat(".") + require.NoError(t, err) + assert.True(t, info.IsDir()) + assert.Equal(t, ".", info.Name()) +} + +// --------------------------------------------------------------------------- +// ReadFile +// --------------------------------------------------------------------------- + +func TestReadFile_Good(t *testing.T) { + n := New() + n.AddData("hello.txt", []byte("hello world")) + + data, err := n.ReadFile("hello.txt") + require.NoError(t, err) + assert.Equal(t, []byte("hello world"), data) +} + +func TestReadFile_Bad(t *testing.T) { + n := New() + _, err := n.ReadFile("missing.txt") + require.Error(t, err) + assert.ErrorIs(t, err, fs.ErrNotExist) +} + +func TestReadFile_Ugly(t *testing.T) { + n := New() + n.AddData("data.bin", []byte("original")) + + // Returned slice must be a copy — mutating it must not affect internal state. + data, err := n.ReadFile("data.bin") + require.NoError(t, err) + data[0] = 'X' + + data2, err := n.ReadFile("data.bin") + require.NoError(t, err) + assert.Equal(t, []byte("original"), data2, "ReadFile must return an independent copy") +} + +// --------------------------------------------------------------------------- +// ReadDir +// --------------------------------------------------------------------------- + +func TestReadDir_Good(t *testing.T) { + n := New() + n.AddData("foo.txt", []byte("foo")) + n.AddData("bar/baz.txt", []byte("baz")) + n.AddData("bar/qux.txt", []byte("qux")) + + // Root. + entries, err := n.ReadDir(".") + require.NoError(t, err) + assert.Equal(t, []string{"bar", "foo.txt"}, sortedNames(entries)) + + // Subdirectory. + barEntries, err := n.ReadDir("bar") + require.NoError(t, err) + assert.Equal(t, []string{"baz.txt", "qux.txt"}, sortedNames(barEntries)) +} + +func TestReadDir_Bad(t *testing.T) { + n := New() + n.AddData("foo.txt", []byte("foo")) + + // Reading a file as a directory should fail. + _, err := n.ReadDir("foo.txt") + require.Error(t, err) + var pathErr *fs.PathError + require.True(t, errors.As(err, &pathErr)) + assert.Equal(t, fs.ErrInvalid, pathErr.Err) +} + +func TestReadDir_Ugly(t *testing.T) { + n := New() + n.AddData("bar/baz.txt", []byte("baz")) + n.AddData("empty_dir/", nil) // Ignored by AddData. 
+ + entries, err := n.ReadDir(".") + require.NoError(t, err) + assert.Equal(t, []string{"bar"}, sortedNames(entries)) +} + +// --------------------------------------------------------------------------- +// Exists +// --------------------------------------------------------------------------- + +func TestExists_Good(t *testing.T) { + n := New() + n.AddData("foo.txt", []byte("foo")) + n.AddData("bar/baz.txt", []byte("baz")) + + exists, err := n.Exists("foo.txt") + require.NoError(t, err) + assert.True(t, exists) + + exists, err = n.Exists("bar") + require.NoError(t, err) + assert.True(t, exists) +} + +func TestExists_Bad(t *testing.T) { + n := New() + exists, err := n.Exists("nonexistent") + require.NoError(t, err) + assert.False(t, exists) +} + +func TestExists_Ugly(t *testing.T) { + n := New() + n.AddData("dummy.txt", []byte("dummy")) + + exists, err := n.Exists(".") + require.NoError(t, err) + assert.True(t, exists, "root '.' must exist") + + exists, err = n.Exists("") + require.NoError(t, err) + assert.True(t, exists, "empty path (root) must exist") +} + +// --------------------------------------------------------------------------- +// Walk +// --------------------------------------------------------------------------- + +func TestWalk_Good(t *testing.T) { + n := New() + n.AddData("foo.txt", []byte("foo")) + n.AddData("bar/baz.txt", []byte("baz")) + n.AddData("bar/qux.txt", []byte("qux")) + + var paths []string + err := n.Walk(".", func(p string, d fs.DirEntry, err error) error { + paths = append(paths, p) + return nil + }) + require.NoError(t, err) + + sort.Strings(paths) + assert.Equal(t, []string{".", "bar", "bar/baz.txt", "bar/qux.txt", "foo.txt"}, paths) +} + +func TestWalk_Bad(t *testing.T) { + n := New() + + var called bool + err := n.Walk("nonexistent", func(p string, d fs.DirEntry, err error) error { + called = true + assert.Error(t, err) + assert.ErrorIs(t, err, fs.ErrNotExist) + return err + }) + assert.True(t, called, "walk function must be called for nonexistent root") + assert.ErrorIs(t, err, fs.ErrNotExist) +} + +func TestWalk_Ugly(t *testing.T) { + n := New() + n.AddData("a/b.txt", []byte("b")) + n.AddData("a/c.txt", []byte("c")) + + // Stop walk early with a custom error. 
+ walkErr := errors.New("stop walking") + var paths []string + err := n.Walk(".", func(p string, d fs.DirEntry, err error) error { + if p == "a/b.txt" { + return walkErr + } + paths = append(paths, p) + return nil + }) + + assert.Equal(t, walkErr, err, "Walk must propagate the callback error") +} + +func TestWalk_Options(t *testing.T) { + n := New() + n.AddData("root.txt", []byte("root")) + n.AddData("a/a1.txt", []byte("a1")) + n.AddData("a/b/b1.txt", []byte("b1")) + n.AddData("c/c1.txt", []byte("c1")) + + t.Run("MaxDepth", func(t *testing.T) { + var paths []string + err := n.Walk(".", func(p string, d fs.DirEntry, err error) error { + paths = append(paths, p) + return nil + }, WalkOptions{MaxDepth: 1}) + require.NoError(t, err) + + sort.Strings(paths) + assert.Equal(t, []string{".", "a", "c", "root.txt"}, paths) + }) + + t.Run("Filter", func(t *testing.T) { + var paths []string + err := n.Walk(".", func(p string, d fs.DirEntry, err error) error { + paths = append(paths, p) + return nil + }, WalkOptions{Filter: func(p string, d fs.DirEntry) bool { + return !strings.HasPrefix(p, "a") + }}) + require.NoError(t, err) + + sort.Strings(paths) + assert.Equal(t, []string{".", "c", "c/c1.txt", "root.txt"}, paths) + }) + + t.Run("SkipErrors", func(t *testing.T) { + var called bool + err := n.Walk("nonexistent", func(p string, d fs.DirEntry, err error) error { + called = true + return err + }, WalkOptions{SkipErrors: true}) + + assert.NoError(t, err, "SkipErrors should suppress the error") + assert.False(t, called, "callback should not be called when error is skipped") + }) +} + +// --------------------------------------------------------------------------- +// CopyFile +// --------------------------------------------------------------------------- + +func TestCopyFile_Good(t *testing.T) { + n := New() + n.AddData("foo.txt", []byte("foo")) + + tmpfile := filepath.Join(t.TempDir(), "test.txt") + err := n.CopyFile("foo.txt", tmpfile, 0644) + require.NoError(t, err) + + content, err := os.ReadFile(tmpfile) + require.NoError(t, err) + assert.Equal(t, "foo", string(content)) +} + +func TestCopyFile_Bad(t *testing.T) { + n := New() + tmpfile := filepath.Join(t.TempDir(), "test.txt") + + // Source does not exist. + err := n.CopyFile("nonexistent.txt", tmpfile, 0644) + assert.Error(t, err) + + // Destination not writable. + n.AddData("foo.txt", []byte("foo")) + err = n.CopyFile("foo.txt", "/nonexistent_dir/test.txt", 0644) + assert.Error(t, err) +} + +func TestCopyFile_Ugly(t *testing.T) { + n := New() + n.AddData("bar/baz.txt", []byte("baz")) + tmpfile := filepath.Join(t.TempDir(), "test.txt") + + // Attempting to copy a directory should fail. + err := n.CopyFile("bar", tmpfile, 0644) + assert.Error(t, err) +} + +// --------------------------------------------------------------------------- +// ToTar / FromTar +// --------------------------------------------------------------------------- + +func TestToTar_Good(t *testing.T) { + n := New() + n.AddData("foo.txt", []byte("foo")) + n.AddData("bar/baz.txt", []byte("baz")) + + tarball, err := n.ToTar() + require.NoError(t, err) + require.NotEmpty(t, tarball) + + // Verify tar content. 
+ tr := tar.NewReader(bytes.NewReader(tarball)) + files := make(map[string]string) + for { + header, err := tr.Next() + if err == io.EOF { + break + } + require.NoError(t, err) + content, err := io.ReadAll(tr) + require.NoError(t, err) + files[header.Name] = string(content) + } + + assert.Equal(t, "foo", files["foo.txt"]) + assert.Equal(t, "baz", files["bar/baz.txt"]) +} + +func TestFromTar_Good(t *testing.T) { + buf := new(bytes.Buffer) + tw := tar.NewWriter(buf) + + for _, f := range []struct{ Name, Body string }{ + {"foo.txt", "foo"}, + {"bar/baz.txt", "baz"}, + } { + hdr := &tar.Header{ + Name: f.Name, + Mode: 0600, + Size: int64(len(f.Body)), + Typeflag: tar.TypeReg, + } + require.NoError(t, tw.WriteHeader(hdr)) + _, err := tw.Write([]byte(f.Body)) + require.NoError(t, err) + } + require.NoError(t, tw.Close()) + + n, err := FromTar(buf.Bytes()) + require.NoError(t, err) + + exists, _ := n.Exists("foo.txt") + assert.True(t, exists, "foo.txt should exist") + + exists, _ = n.Exists("bar/baz.txt") + assert.True(t, exists, "bar/baz.txt should exist") +} + +func TestFromTar_Bad(t *testing.T) { + // Truncated data that cannot be a valid tar. + truncated := make([]byte, 100) + _, err := FromTar(truncated) + assert.Error(t, err, "truncated data should produce an error") +} + +func TestTarRoundTrip_Good(t *testing.T) { + n1 := New() + n1.AddData("a.txt", []byte("alpha")) + n1.AddData("b/c.txt", []byte("charlie")) + + tarball, err := n1.ToTar() + require.NoError(t, err) + + n2, err := FromTar(tarball) + require.NoError(t, err) + + // Verify n2 matches n1. + data, err := n2.ReadFile("a.txt") + require.NoError(t, err) + assert.Equal(t, []byte("alpha"), data) + + data, err = n2.ReadFile("b/c.txt") + require.NoError(t, err) + assert.Equal(t, []byte("charlie"), data) +} + +// --------------------------------------------------------------------------- +// fs.FS interface compliance +// --------------------------------------------------------------------------- + +func TestFSInterface_Good(t *testing.T) { + n := New() + n.AddData("hello.txt", []byte("world")) + + // fs.FS + var fsys fs.FS = n + file, err := fsys.Open("hello.txt") + require.NoError(t, err) + defer file.Close() + + // fs.StatFS + var statFS fs.StatFS = n + info, err := statFS.Stat("hello.txt") + require.NoError(t, err) + assert.Equal(t, "hello.txt", info.Name()) + assert.Equal(t, int64(5), info.Size()) + + // fs.ReadFileFS + var readFS fs.ReadFileFS = n + data, err := readFS.ReadFile("hello.txt") + require.NoError(t, err) + assert.Equal(t, []byte("world"), data) +} + +// --------------------------------------------------------------------------- +// Helpers +// --------------------------------------------------------------------------- + +func sortedNames(entries []fs.DirEntry) []string { + var names []string + for _, e := range entries { + names = append(names, e.Name()) + } + sort.Strings(names) + return names +}
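For reviewers, a minimal end-to-end sketch of how the new package is intended to be used (the import path and file names are illustrative; substitute the module's actual path):

	package main

	import (
		"fmt"

		"example.com/yourmodule/pkg/io/node" // illustrative import path
	)

	func main() {
		n := node.New()
		n.AddData("docs/readme.txt", []byte("hello"))

		// ReadFile returns an independent copy of the stored content.
		data, err := n.ReadFile("docs/readme.txt")
		if err != nil {
			panic(err)
		}
		fmt.Println(string(data))

		// Round-trip the whole Node through a tarball.
		tarball, err := n.ToTar()
		if err != nil {
			panic(err)
		}
		n2, err := node.FromTar(tarball)
		if err != nil {
			panic(err)
		}
		ok, _ := n2.Exists("docs/readme.txt")
		fmt.Println(ok) // true
	}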