Compare commits

...

1 commit

Author: google-labs-jules[bot]
SHA1: 74ac2623d2
feat: Add export command for archive conversion
This commit introduces a new 'borg export' command that allows users to convert proprietary archives (.stim, .trix, .dat) into widely supported formats.

Key features include:
- Export to directory, zip, and tar.gz formats.
- File filtering using `--include` and `--exclude` glob patterns.
- Password-based encryption for zip file output using the `--password` flag.

The command handles both standard and encrypted input archives, making it easier to share data with users who do not have Borg installed.

Co-authored-by: Snider <631881+Snider@users.noreply.github.com>
Date: 2026-02-02 00:51:44 +00:00
4 changed files with 595 additions and 0 deletions
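
As a quick illustration of the feature set described above, the sketch below drives the new command from Go the same way the tests in cmd/export_test.go do. The import path github.com/Snider/Borg/cmd is assumed from the module path used elsewhere in this diff, and the archive name, output path, password, and patterns are placeholders rather than anything from this commit.

package main

import (
	"log"

	"github.com/Snider/Borg/cmd" // assumed import path for the package shown in this diff
)

func main() {
	// Wire up the root command and the export subcommand, mirroring cmd/export_test.go.
	root := cmd.NewRootCmd()
	root.AddCommand(cmd.GetExportCmd())

	// Convert an encrypted .stim archive into a password-protected zip,
	// keeping only top-level .txt files and skipping anything directly under logs/.
	// backup.stim, backup.zip, and the password are placeholder values.
	root.SetArgs([]string{
		"export", "backup.stim",
		"--format", "zip",
		"-o", "backup.zip",
		"-p", "correct-horse",
		"--include", "*.txt",
		"--exclude", "logs/*",
	})

	if err := root.Execute(); err != nil {
		log.Fatal(err)
	}
}

On the command line the equivalent is roughly: borg export backup.stim -f zip -o backup.zip -p <password> --include '*.txt' --exclude 'logs/*', using the flags defined in cmd/export.go.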

cmd/export.go (new file, 269 additions)
@@ -0,0 +1,269 @@
package cmd

import (
	"archive/tar"
	"compress/gzip"
	"fmt"
	azip "github.com/alexmullins/zip"
	"io"
	"io/fs"
	"os"
	"path/filepath"
	"strings"

	"github.com/Snider/Borg/pkg/datanode"
	"github.com/Snider/Borg/pkg/tim"
	"github.com/Snider/Borg/pkg/trix"
	"github.com/spf13/cobra"
)
// shouldInclude determines if a file path should be included based on include and exclude patterns.
// Exclude patterns take precedence. If a path matches an exclude pattern, it's excluded.
// If include patterns are provided, the path must match at least one of them.
// If no include patterns are provided, all paths are included by default (unless excluded).
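//
// For example (hypothetical arguments, matching the patterns exercised in the tests below):
//
//	shouldInclude("dir/file2.txt", nil, []string{"dir/*"})  // false: matches the exclude pattern
//	shouldInclude("file1.txt", []string{"*.txt"}, nil)      // true: matches an include pattern
//	shouldInclude("image.jpg", []string{"*.txt"}, nil)      // false: no include pattern matched
//
// Patterns use filepath.Match semantics, so "*" does not cross path separators.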
func shouldInclude(path string, include, exclude []string) (bool, error) {
	for _, pattern := range exclude {
		if matched, err := filepath.Match(pattern, path); err != nil {
			return false, fmt.Errorf("error matching exclude pattern '%s': %w", pattern, err)
		} else if matched {
			return false, nil
		}
	}
	if len(include) > 0 {
		for _, pattern := range include {
			if matched, err := filepath.Match(pattern, path); err != nil {
				return false, fmt.Errorf("error matching include pattern '%s': %w", pattern, err)
			} else if matched {
				return true, nil
			}
		}
		return false, nil // Must match an include pattern if provided
	}
	return true, nil // Include by default if no include patterns
}
var exportCmd = NewExportCmd()

func NewExportCmd() *cobra.Command {
	cmd := &cobra.Command{
		Use:   "export [file]",
		Short: "Export an archive to a different format",
		Args:  cobra.ExactArgs(1),
		RunE: func(cmd *cobra.Command, args []string) error {
			inputFile := args[0]
			output, _ := cmd.Flags().GetString("output")
			format, _ := cmd.Flags().GetString("format")
			password, _ := cmd.Flags().GetString("password")
			include, _ := cmd.Flags().GetStringSlice("include")
			exclude, _ := cmd.Flags().GetStringSlice("exclude")

			data, err := os.ReadFile(inputFile)
			if err != nil {
				return fmt.Errorf("failed to read input file: %w", err)
			}

			var dn *datanode.DataNode
			// Handle .stim (encrypted TIM)
			if strings.HasSuffix(inputFile, ".stim") || (len(data) > 4 && string(data[:4]) == "STIM") {
				if password == "" {
					return fmt.Errorf("password required for .stim files")
				}
				m, err := tim.FromSigil(data, password)
				if err != nil {
					return fmt.Errorf("failed to decode .stim file: %w", err)
				}
				dn = m.RootFS
			} else {
				// Handle .dat, .trix, .tim
				dn, err = trix.FromTrix(data, password)
				if err != nil {
					return fmt.Errorf("failed to decode archive: %w", err)
				}
			}

			switch format {
			case "dir":
				err := os.MkdirAll(output, 0755)
				if err != nil {
					return fmt.Errorf("failed to create output directory: %w", err)
				}
				err = dn.Walk(".", func(path string, d fs.DirEntry, err error) error {
					if err != nil {
						return err
					}
					if path == "." {
						return nil
					} // Skip root
					includePath, err := shouldInclude(path, include, exclude)
					if err != nil {
						return err
					}
					if d.IsDir() {
						if !includePath {
							return fs.SkipDir
						}
						return os.MkdirAll(filepath.Join(output, path), 0755)
					}
					if !includePath {
						return nil
					}
					return dn.CopyFile(path, filepath.Join(output, path), 0644)
				})
				if err != nil {
					return fmt.Errorf("failed to export to directory: %w", err)
				}
				fmt.Fprintf(cmd.OutOrStdout(), "Exported to %s\n", output)
				return nil
			case "zip":
				zipFile, err := os.Create(output)
				if err != nil {
					return fmt.Errorf("failed to create zip file: %w", err)
				}
				defer zipFile.Close()
				zipWriter := azip.NewWriter(zipFile)
				defer zipWriter.Close()
				err = dn.Walk(".", func(path string, d fs.DirEntry, err error) error {
					if err != nil {
						return err
					}
					if path == "." {
						return nil
					}
					includePath, err := shouldInclude(path, include, exclude)
					if err != nil {
						return err
					}
					if d.IsDir() {
						if !includePath {
							return fs.SkipDir
						}
						_, err := zipWriter.Create(path + "/")
						return err
					}
					if !includePath {
						return nil
					}
					var writer io.Writer
					if password != "" {
						writer, err = zipWriter.Encrypt(path, password)
					} else {
						writer, err = zipWriter.Create(path)
					}
					if err != nil {
						return err
					}
					file, err := dn.Open(path)
					if err != nil {
						return err
					}
					defer file.Close()
					_, err = io.Copy(writer, file)
					return err
				})
				if err != nil {
					return fmt.Errorf("failed to export to zip: %w", err)
				}
				fmt.Fprintf(cmd.OutOrStdout(), "Exported to %s\n", output)
				return nil
			case "tar.gz":
				tarFile, err := os.Create(output)
				if err != nil {
					return fmt.Errorf("failed to create tar.gz file: %w", err)
				}
				defer tarFile.Close()
				gzipWriter := gzip.NewWriter(tarFile)
				defer gzipWriter.Close()
				tarWriter := tar.NewWriter(gzipWriter)
				defer tarWriter.Close()
				err = dn.Walk(".", func(path string, d fs.DirEntry, err error) error {
					if err != nil {
						return err
					}
					if path == "." {
						return nil
					}
					includePath, err := shouldInclude(path, include, exclude)
					if err != nil {
						return err
					}
					if d.IsDir() {
						if !includePath {
							return fs.SkipDir
						}
					} else { // It's a file
						if !includePath {
							return nil
						}
					}
					info, err := d.Info()
					if err != nil {
						return err
					}
					header, err := tar.FileInfoHeader(info, "")
					if err != nil {
						return err
					}
					header.Name = path
					if err := tarWriter.WriteHeader(header); err != nil {
						return err
					}
					if !d.IsDir() {
						file, err := dn.Open(path)
						if err != nil {
							return err
						}
						defer file.Close()
						_, err = io.Copy(tarWriter, file)
						return err
					}
					return nil
				})
				if err != nil {
					return fmt.Errorf("failed to export to tar.gz: %w", err)
				}
				fmt.Fprintf(cmd.OutOrStdout(), "Exported to %s\n", output)
				return nil
			default:
				return fmt.Errorf("unsupported format: %s", format)
			}
		},
	}

	cmd.Flags().StringP("format", "f", "zip", "Output format (zip, tar.gz, dir)")
	cmd.Flags().StringP("output", "o", "", "Output file or directory")
	cmd.Flags().StringP("password", "p", "", "Password for encryption (zip)")
	cmd.Flags().StringSlice("include", []string{}, "Patterns of files to include")
	cmd.Flags().StringSlice("exclude", []string{}, "Patterns of files to exclude")
	cmd.MarkFlagRequired("output")
	return cmd
}

func GetExportCmd() *cobra.Command {
	return exportCmd
}

func init() {
	RootCmd.AddCommand(GetExportCmd())
}

cmd/export_test.go (new file, 323 additions)
@@ -0,0 +1,323 @@
package cmd

import (
	"archive/tar"
	"bytes"
	"compress/gzip"
	"github.com/alexmullins/zip"
	"io"
	"os"
	"path/filepath"
	"testing"

	"github.com/Snider/Borg/pkg/datanode"
	"github.com/Snider/Borg/pkg/tim"
	"github.com/Snider/Borg/pkg/trix"
	"github.com/stretchr/testify/assert"
	"github.com/stretchr/testify/require"
)
func TestExportCmd_Dir(t *testing.T) {
	// Create a test datanode
	dn := datanode.New()
	dn.AddData("file1.txt", []byte("hello"))
	dn.AddData("dir/file2.txt", []byte("world"))
	trixData, err := trix.ToTrix(dn, "")
	require.NoError(t, err)

	// Write the datanode to a temporary file
	tmpFile, err := os.CreateTemp(t.TempDir(), "*.dat")
	require.NoError(t, err)
	_, err = tmpFile.Write(trixData)
	require.NoError(t, err)
	tmpFile.Close()

	// Create a temporary output directory
	outDir := t.TempDir()

	// Execute the export command
	cmd := NewRootCmd()
	cmd.AddCommand(GetExportCmd())
	cmd.SetArgs([]string{"export", tmpFile.Name(), "--format", "dir", "-o", outDir})
	var outBuf bytes.Buffer
	cmd.SetOut(&outBuf)
	err = cmd.Execute()
	require.NoError(t, err)

	// Verify the output
	assert.FileExists(t, filepath.Join(outDir, "file1.txt"))
	assert.FileExists(t, filepath.Join(outDir, "dir/file2.txt"))
	content1, err := os.ReadFile(filepath.Join(outDir, "file1.txt"))
	require.NoError(t, err)
	assert.Equal(t, "hello", string(content1))
	content2, err := os.ReadFile(filepath.Join(outDir, "dir/file2.txt"))
	require.NoError(t, err)
	assert.Equal(t, "world", string(content2))
}
func TestExportCmd_Zip(t *testing.T) {
	// Create a test datanode
	dn := datanode.New()
	dn.AddData("file1.txt", []byte("hello"))
	dn.AddData("dir/file2.txt", []byte("world"))
	trixData, err := trix.ToTrix(dn, "")
	require.NoError(t, err)

	// Write the datanode to a temporary file
	tmpFile, err := os.CreateTemp(t.TempDir(), "*.dat")
	require.NoError(t, err)
	_, err = tmpFile.Write(trixData)
	require.NoError(t, err)
	tmpFile.Close()

	// Create a temporary output file
	outZip := filepath.Join(t.TempDir(), "out.zip")

	// Execute the export command
	cmd := NewRootCmd()
	cmd.AddCommand(GetExportCmd())
	cmd.SetArgs([]string{"export", tmpFile.Name(), "--format", "zip", "-o", outZip})
	var outBuf bytes.Buffer
	cmd.SetOut(&outBuf)
	err = cmd.Execute()
	require.NoError(t, err)

	// Verify the output
	zipReader, err := zip.OpenReader(outZip)
	require.NoError(t, err)
	defer zipReader.Close()
	found1 := false
	found2 := false
	for _, f := range zipReader.File {
		if f.Name == "file1.txt" {
			found1 = true
			rc, err := f.Open()
			require.NoError(t, err)
			defer rc.Close()
			content, err := io.ReadAll(rc)
			require.NoError(t, err)
			assert.Equal(t, "hello", string(content))
		}
		if f.Name == "dir/file2.txt" {
			found2 = true
			rc, err := f.Open()
			require.NoError(t, err)
			defer rc.Close()
			content, err := io.ReadAll(rc)
			require.NoError(t, err)
			assert.Equal(t, "world", string(content))
		}
	}
	assert.True(t, found1, "file1.txt not found in zip")
	assert.True(t, found2, "dir/file2.txt not found in zip")
}
func TestExportCmd_TarGz(t *testing.T) {
	// Create a test datanode
	dn := datanode.New()
	dn.AddData("file1.txt", []byte("hello"))
	dn.AddData("dir/file2.txt", []byte("world"))
	trixData, err := trix.ToTrix(dn, "")
	require.NoError(t, err)

	// Write the datanode to a temporary file
	tmpFile, err := os.CreateTemp(t.TempDir(), "*.dat")
	require.NoError(t, err)
	_, err = tmpFile.Write(trixData)
	require.NoError(t, err)
	tmpFile.Close()

	// Create a temporary output file
	outTarGz := filepath.Join(t.TempDir(), "out.tar.gz")

	// Execute the export command
	cmd := NewRootCmd()
	cmd.AddCommand(GetExportCmd())
	cmd.SetArgs([]string{"export", tmpFile.Name(), "--format", "tar.gz", "-o", outTarGz})
	var outBuf bytes.Buffer
	cmd.SetOut(&outBuf)
	err = cmd.Execute()
	require.NoError(t, err)

	// Verify the output
	file, err := os.Open(outTarGz)
	require.NoError(t, err)
	defer file.Close()
	gzipReader, err := gzip.NewReader(file)
	require.NoError(t, err)
	defer gzipReader.Close()
	tarReader := tar.NewReader(gzipReader)
	found1 := false
	found2 := false
	for {
		header, err := tarReader.Next()
		if err == io.EOF {
			break
		}
		require.NoError(t, err)
		if header.Name == "file1.txt" {
			found1 = true
			content, err := io.ReadAll(tarReader)
			require.NoError(t, err)
			assert.Equal(t, "hello", string(content))
		}
		if header.Name == "dir/file2.txt" {
			found2 = true
			content, err := io.ReadAll(tarReader)
			require.NoError(t, err)
			assert.Equal(t, "world", string(content))
		}
	}
	assert.True(t, found1, "file1.txt not found in tar.gz")
	assert.True(t, found2, "dir/file2.txt not found in tar.gz")
}
func TestExportCmd_InvalidFormat(t *testing.T) {
	dn := datanode.New()
	trixData, err := trix.ToTrix(dn, "")
	require.NoError(t, err)

	// Create a temporary file
	tmpFile, err := os.CreateTemp(t.TempDir(), "*.dat")
	require.NoError(t, err)
	_, err = tmpFile.Write(trixData)
	require.NoError(t, err)
	tmpFile.Close()

	// Execute the export command with an invalid format
	cmd := NewRootCmd()
	cmd.AddCommand(GetExportCmd())
	cmd.SetArgs([]string{"export", tmpFile.Name(), "--format", "invalid", "-o", "out"})
	err = cmd.Execute()
	require.Error(t, err)
	assert.Contains(t, err.Error(), "unsupported format: invalid")
}
func TestExportCmd_Filtering(t *testing.T) {
	// Create a test datanode
	dn := datanode.New()
	dn.AddData("file1.txt", []byte("hello"))
	dn.AddData("dir/file2.txt", []byte("world"))
	dn.AddData("dir/image.jpg", []byte("world"))
	trixData, err := trix.ToTrix(dn, "")
	require.NoError(t, err)

	// Write the datanode to a temporary file
	tmpFile, err := os.CreateTemp(t.TempDir(), "*.dat")
	require.NoError(t, err)
	_, err = tmpFile.Write(trixData)
	require.NoError(t, err)
	tmpFile.Close()

	// Create a temporary output directory
	outDir := t.TempDir()

	// Execute the export command
	cmd := NewRootCmd()
	cmd.AddCommand(GetExportCmd())
	cmd.SetArgs([]string{"export", tmpFile.Name(), "--format", "dir", "-o", outDir, "--include", "*.txt", "--exclude", "dir/*"})
	var outBuf bytes.Buffer
	cmd.SetOut(&outBuf)
	err = cmd.Execute()
	require.NoError(t, err)

	// Verify the output
	assert.FileExists(t, filepath.Join(outDir, "file1.txt"))
	assert.NoFileExists(t, filepath.Join(outDir, "dir/file2.txt"))
	assert.NoFileExists(t, filepath.Join(outDir, "dir/image.jpg"))
}
func TestExportCmd_ZipEncryption(t *testing.T) {
	// Create a test datanode
	dn := datanode.New()
	dn.AddData("file1.txt", []byte("hello"))

	// Create a .stim file
	m, err := tim.FromDataNode(dn)
	require.NoError(t, err)
	stimData, err := m.ToSigil("password")
	require.NoError(t, err)

	// Write the datanode to a temporary file
	tmpFile, err := os.CreateTemp(t.TempDir(), "*.stim")
	require.NoError(t, err)
	_, err = tmpFile.Write(stimData)
	require.NoError(t, err)
	tmpFile.Close()

	// Create a temporary output file
	outZip := filepath.Join(t.TempDir(), "out.zip")

	// Execute the export command
	cmd := NewRootCmd()
	cmd.AddCommand(GetExportCmd())
	cmd.SetArgs([]string{"export", tmpFile.Name(), "--format", "zip", "-o", outZip, "-p", "password"})
	var outBuf bytes.Buffer
	cmd.SetOut(&outBuf)
	err = cmd.Execute()
	require.NoError(t, err)

	// Verify the output
	zipReader, err := zip.OpenReader(outZip)
	require.NoError(t, err)
	defer zipReader.Close()
	assert.Len(t, zipReader.File, 1)
	f := zipReader.File[0]
	assert.Equal(t, "file1.txt", f.Name)
	f.SetPassword("password")
	rc, err := f.Open()
	require.NoError(t, err)
	defer rc.Close()
	content, err := io.ReadAll(rc)
	require.NoError(t, err)
	assert.Equal(t, "hello", string(content))
}
func TestExportCmd_StimInput(t *testing.T) {
	// Create a test datanode
	dn := datanode.New()
	dn.AddData("file1.txt", []byte("hello"))

	// Create a .stim file
	m, err := tim.FromDataNode(dn)
	require.NoError(t, err)
	stimData, err := m.ToSigil("password")
	require.NoError(t, err)

	// Write the datanode to a temporary file
	tmpFile, err := os.CreateTemp(t.TempDir(), "*.stim")
	require.NoError(t, err)
	_, err = tmpFile.Write(stimData)
	require.NoError(t, err)
	tmpFile.Close()

	// Create a temporary output directory
	outDir := t.TempDir()

	// Execute the export command
	cmd := NewRootCmd()
	cmd.AddCommand(GetExportCmd())
	cmd.SetArgs([]string{"export", tmpFile.Name(), "--format", "dir", "-o", outDir, "-p", "password"})
	var outBuf bytes.Buffer
	cmd.SetOut(&outBuf)
	err = cmd.Execute()
	require.NoError(t, err)

	// Verify the output
	assert.FileExists(t, filepath.Join(outDir, "file1.txt"))
	content, err := os.ReadFile(filepath.Join(outDir, "file1.txt"))
	require.NoError(t, err)
	assert.Equal(t, "hello", string(content))
}

go.mod (1 addition)
@@ -22,6 +22,7 @@ require (
	dario.cat/mergo v1.0.0 // indirect
	github.com/Microsoft/go-winio v0.6.2 // indirect
	github.com/ProtonMail/go-crypto v1.3.0 // indirect
	github.com/alexmullins/zip v0.0.0-20180717182244-4affb64b04d0 // indirect
	github.com/bep/debounce v1.2.1 // indirect
	github.com/cloudflare/circl v1.6.1 // indirect
	github.com/cyphar/filepath-securejoin v0.4.1 // indirect

go.sum (2 additions)
@@ -7,6 +7,8 @@ github.com/ProtonMail/go-crypto v1.3.0 h1:ILq8+Sf5If5DCpHQp4PbZdS1J7HDFRXz/+xKBi
github.com/ProtonMail/go-crypto v1.3.0/go.mod h1:9whxjD8Rbs29b4XWbB8irEcE8KHMqaR2e7GWU1R+/PE=
github.com/Snider/Enchantrix v0.0.2 h1:ExZQiBhfS/p/AHFTKhY80TOd+BXZjK95EzByAEgwvjs=
github.com/Snider/Enchantrix v0.0.2/go.mod h1:CtFcLAvnDT1KcuF1JBb/DJj0KplY8jHryO06KzQ1hsQ=
github.com/alexmullins/zip v0.0.0-20180717182244-4affb64b04d0 h1:BVts5dexXf4i+JX8tXlKT0aKoi38JwTXSe+3WUneX0k=
github.com/alexmullins/zip v0.0.0-20180717182244-4affb64b04d0/go.mod h1:FDIQmoMNJJl5/k7upZEnGvgWVZfFeE6qHeN7iCMbCsA=
github.com/anmitsu/go-shlex v0.0.0-20200514113438-38f4b401e2be h1:9AeTilPcZAjCFIImctFaOjnTIavg87rW78vTPkQqLI8=
github.com/anmitsu/go-shlex v0.0.0-20200514113438-38f4b401e2be/go.mod h1:ySMOLuWl6zY27l47sB3qLNK6tF2fkHG55UZxx8oIVo4=
github.com/armon/go-socks5 v0.0.0-20160902184237-e75332964ef5 h1:0CwZNZbxp69SHPdPJAN/hZIm0C4OItdklCFmMRWYpio=