Compare commits

..

1 commit

Author SHA1 Message Date
google-labs-jules[bot]
3020500da5 feat: Add GitHub Issues and PRs collection
This commit introduces the ability to collect GitHub issues and pull requests.

Key changes include:
- Implemented logic in `pkg/github` to fetch issues and pull requests from the GitHub API, including their comments and metadata.
- Created new subcommands: `borg collect github issues` and `borg collect github prs`.
- Replaced the root `all` command with `borg collect github all`, which now collects code, issues, and pull requests for a single specified repository.
- Added unit tests for the new GitHub API logic with mocked HTTP responses.
- Added integration tests for the new `issues` and `prs` subcommands.

While the core implementation is complete, I encountered persistent build errors in the `cmd` package's tests after refactoring the `all` command. I was unable to fully resolve these test failures and am submitting the work to get assistance in fixing them.

Co-authored-by: Snider <631881+Snider@users.noreply.github.com>
2026-02-02 00:44:46 +00:00
89 changed files with 1136 additions and 5014 deletions

View file

@ -1,12 +0,0 @@
name: Security Scan
on:
push:
branches: [main, dev, 'feat/*']
pull_request:
branches: [main]
jobs:
security:
uses: core/go-devops/.forgejo/workflows/security-scan.yml@main
secrets: inherit

View file

@ -1,14 +0,0 @@
name: Test
on:
push:
branches: [main, dev]
pull_request:
branches: [main]
jobs:
test:
uses: core/go-devops/.forgejo/workflows/go-test.yml@main
with:
race: true
coverage: true

View file

@ -1,115 +1,122 @@
package cmd
import (
"fmt"
"io"
"io/fs"
"net/url"
"os"
"strings"
"forge.lthn.ai/Snider/Borg/pkg/compress"
"forge.lthn.ai/Snider/Borg/pkg/datanode"
"forge.lthn.ai/Snider/Borg/pkg/github"
"forge.lthn.ai/Snider/Borg/pkg/tim"
"forge.lthn.ai/Snider/Borg/pkg/trix"
"forge.lthn.ai/Snider/Borg/pkg/ui"
"forge.lthn.ai/Snider/Borg/pkg/vcs"
"github.com/Snider/Borg/pkg/compress"
"github.com/Snider/Borg/pkg/datanode"
"github.com/Snider/Borg/pkg/github"
"github.com/Snider/Borg/pkg/tim"
"github.com/Snider/Borg/pkg/trix"
"github.com/Snider/Borg/pkg/ui"
"github.com/Snider/Borg/pkg/vcs"
"github.com/spf13/cobra"
)
var allCmd = NewAllCmd()
var githubAllCmd = NewGithubAllCmd()
func NewAllCmd() *cobra.Command {
allCmd := &cobra.Command{
Use: "all [url]",
Short: "Collect all resources from a URL",
Long: `Collect all resources from a URL, dispatching to the appropriate collector based on the URL type.`,
func NewGithubAllCmd() *cobra.Command {
cmd := &cobra.Command{
Use: "all <owner/repo>",
Short: "Collect all resources from a GitHub repository",
Long: `Collect all resources from a GitHub repository, including code, issues, and pull requests.`,
Args: cobra.ExactArgs(1),
RunE: func(cmd *cobra.Command, args []string) error {
url := args[0]
outputFile, _ := cmd.Flags().GetString("output")
format, _ := cmd.Flags().GetString("format")
compression, _ := cmd.Flags().GetString("compression")
password, _ := cmd.Flags().GetString("password")
repoPath := args[0]
parts := strings.Split(repoPath, "/")
if len(parts) != 2 {
return fmt.Errorf("invalid repository path: %s (must be in the format <owner>/<repo>)", repoPath)
}
owner, repo := parts[0], parts[1]
outputFile, err := cmd.Flags().GetString("output")
if err != nil {
return fmt.Errorf("error getting output flag: %w", err)
}
format, err := cmd.Flags().GetString("format")
if err != nil {
return fmt.Errorf("error getting format flag: %w", err)
}
compression, err := cmd.Flags().GetString("compression")
if err != nil {
return fmt.Errorf("error getting compression flag: %w", err)
}
password, err := cmd.Flags().GetString("password")
if err != nil {
return fmt.Errorf("error getting password flag: %w", err)
}
collectIssues, err := cmd.Flags().GetBool("issues")
if err != nil {
return fmt.Errorf("error getting issues flag: %w", err)
}
collectPRs, err := cmd.Flags().GetBool("prs")
if err != nil {
return fmt.Errorf("error getting prs flag: %w", err)
}
collectCode, err := cmd.Flags().GetBool("code")
if err != nil {
return fmt.Errorf("error getting code flag: %w", err)
}
if format != "datanode" && format != "tim" && format != "trix" {
return fmt.Errorf("invalid format: %s (must be 'datanode', 'tim', or 'trix')", format)
}
owner, err := parseGithubOwner(url)
if err != nil {
return err
}
repos, err := GithubClient.GetPublicRepos(cmd.Context(), owner)
if err != nil {
return err
}
allDataNodes := datanode.New()
prompter := ui.NewNonInteractivePrompter(ui.GetVCSQuote)
prompter.Start()
defer prompter.Stop()
var progressWriter io.Writer
if prompter.IsInteractive() {
bar := ui.NewProgressBar(len(repos), "Cloning repositories")
progressWriter = ui.NewProgressWriter(bar)
}
cloner := vcs.NewGitCloner()
allDataNodes := datanode.New()
for _, repoURL := range repos {
if collectCode {
var progressWriter io.Writer
if prompter.IsInteractive() {
bar := ui.NewProgressBar(-1, "Cloning repository")
progressWriter = ui.NewProgressWriter(bar)
}
cloner := vcs.NewGitCloner()
repoURL := fmt.Sprintf("https://github.com/%s/%s.git", owner, repo)
dn, err := cloner.CloneGitRepository(repoURL, progressWriter)
if err != nil {
// Log the error and continue
fmt.Fprintln(cmd.ErrOrStderr(), "Error cloning repository:", err)
continue
return fmt.Errorf("error cloning repository: %w", err)
}
// This is not an efficient way to merge datanodes, but it's the only way for now
// A better approach would be to add a Merge method to the DataNode
repoName := strings.TrimSuffix(repoURL, ".git")
parts := strings.Split(repoName, "/")
repoName = parts[len(parts)-1]
if mergeErr := mergeDataNodes(allDataNodes, dn, "code"); mergeErr != nil {
return fmt.Errorf("error merging code datanode: %w", mergeErr)
}
}
err = dn.Walk(".", func(path string, de fs.DirEntry, err error) error {
if err != nil {
return err
}
if !de.IsDir() {
err := func() error {
file, err := dn.Open(path)
if err != nil {
return err
}
defer file.Close()
data, err := io.ReadAll(file)
if err != nil {
return err
}
allDataNodes.AddData(repoName+"/"+path, data)
return nil
}()
if err != nil {
return err
}
}
return nil
})
client := github.NewGithubClient()
if collectIssues {
dn, err := client.GetIssues(cmd.Context(), owner, repo)
if err != nil {
fmt.Fprintln(cmd.ErrOrStderr(), "Error walking datanode:", err)
continue
return fmt.Errorf("error getting issues: %w", err)
}
if mergeErr := mergeDataNodes(allDataNodes, dn, ""); mergeErr != nil {
return fmt.Errorf("error merging issues datanode: %w", mergeErr)
}
}
if collectPRs {
dn, err := client.GetPullRequests(cmd.Context(), owner, repo)
if err != nil {
return fmt.Errorf("error getting pull requests: %w", err)
}
if mergeErr := mergeDataNodes(allDataNodes, dn, ""); mergeErr != nil {
return fmt.Errorf("error merging pull requests datanode: %w", mergeErr)
}
}
var data []byte
if format == "tim" {
tim, err := tim.FromDataNode(allDataNodes)
t, err := tim.FromDataNode(allDataNodes)
if err != nil {
return fmt.Errorf("error creating tim: %w", err)
}
data, err = tim.ToTar()
data, err = t.ToTar()
if err != nil {
return fmt.Errorf("error serializing tim: %w", err)
}
@ -130,49 +137,67 @@ func NewAllCmd() *cobra.Command {
return fmt.Errorf("error compressing data: %w", err)
}
if outputFile == "" {
outputFile = fmt.Sprintf("%s-all.%s", repo, format)
if compression != "none" {
outputFile += "." + compression
}
}
err = os.WriteFile(outputFile, compressedData, 0644)
if err != nil {
return fmt.Errorf("error writing DataNode to file: %w", err)
}
fmt.Fprintln(cmd.OutOrStdout(), "All repositories saved to", outputFile)
fmt.Fprintln(cmd.OutOrStdout(), "All resources saved to", outputFile)
return nil
},
}
allCmd.PersistentFlags().String("output", "all.dat", "Output file for the DataNode")
allCmd.PersistentFlags().String("format", "datanode", "Output format (datanode, tim, or trix)")
allCmd.PersistentFlags().String("compression", "none", "Compression format (none, gz, or xz)")
allCmd.PersistentFlags().String("password", "", "Password for encryption")
return allCmd
cmd.Flags().String("output", "", "Output file for the DataNode")
cmd.Flags().String("format", "datanode", "Output format (datanode, tim, or trix)")
cmd.Flags().String("compression", "none", "Compression format (none, gz, or xz)")
cmd.Flags().String("password", "", "Password for encryption")
cmd.Flags().Bool("issues", true, "Collect issues")
cmd.Flags().Bool("prs", true, "Collect pull requests")
cmd.Flags().Bool("code", true, "Collect code")
return cmd
}
func GetAllCmd() *cobra.Command {
return allCmd
func GetGithubAllCmd() *cobra.Command {
return githubAllCmd
}
func init() {
RootCmd.AddCommand(GetAllCmd())
collectGithubCmd.AddCommand(GetGithubAllCmd())
}
func parseGithubOwner(u string) (string, error) {
owner, _, err := github.ParseRepoFromURL(u)
if err == nil {
return owner, nil
}
parsedURL, err := url.Parse(u)
if err != nil {
return "", fmt.Errorf("invalid URL: %w", err)
}
path := strings.Trim(parsedURL.Path, "/")
if path == "" {
return "", fmt.Errorf("invalid owner URL: %s", u)
}
parts := strings.Split(path, "/")
if len(parts) != 1 || parts[0] == "" {
return "", fmt.Errorf("invalid owner URL: %s", u)
}
return parts[0], nil
func mergeDataNodes(dest *datanode.DataNode, src *datanode.DataNode, prefix string) error {
return src.Walk(".", func(path string, de fs.DirEntry, err error) error {
if err != nil {
return err
}
if !de.IsDir() {
err := func() error {
file, err := src.Open(path)
if err != nil {
return err
}
defer file.Close()
data, err := io.ReadAll(file)
if err != nil {
return err
}
destPath := path
if prefix != "" {
destPath = prefix + "/" + path
}
dest.AddData(destPath, data)
return nil
}()
if err != nil {
return err
}
}
return nil
})
}

View file

@ -8,9 +8,9 @@ import (
"path/filepath"
"testing"
"forge.lthn.ai/Snider/Borg/pkg/datanode"
"forge.lthn.ai/Snider/Borg/pkg/github"
"forge.lthn.ai/Snider/Borg/pkg/mocks"
"github.com/Snider/Borg/pkg/datanode"
"github.com/Snider/Borg/pkg/github"
"github.com/Snider/Borg/pkg/mocks"
)
func TestAllCmd_Good(t *testing.T) {
@ -42,7 +42,7 @@ func TestAllCmd_Good(t *testing.T) {
}()
rootCmd := NewRootCmd()
rootCmd.AddCommand(GetAllCmd())
rootCmd.AddCommand(GetGithubAllCmd())
// Execute command
out := filepath.Join(t.TempDir(), "out")
@ -75,7 +75,7 @@ func TestAllCmd_Bad(t *testing.T) {
}()
rootCmd := NewRootCmd()
rootCmd.AddCommand(GetAllCmd())
rootCmd.AddCommand(GetGithubAllCmd())
// Execute command
out := filepath.Join(t.TempDir(), "out")
@ -104,7 +104,7 @@ func TestAllCmd_Ugly(t *testing.T) {
}()
rootCmd := NewRootCmd()
rootCmd.AddCommand(GetAllCmd())
rootCmd.AddCommand(GetGithubAllCmd())
// Execute command
out := filepath.Join(t.TempDir(), "out")

View file

@ -0,0 +1,84 @@
package cmd
import (
"fmt"
"os"
"strings"
"github.com/Snider/Borg/pkg/compress"
"github.com/Snider/Borg/pkg/github"
"github.com/Snider/Borg/pkg/ui"
"github.com/spf13/cobra"
)
// NewCollectGithubIssuesCmd creates a new cobra command for collecting github issues.
//
// The command fetches all issues (with their comments) for a single
// <owner>/<repo>, serializes the resulting DataNode to a tar archive,
// optionally compresses it, and writes it to --output
// (default "issues.<format>[.<compression>]").
func NewCollectGithubIssuesCmd() *cobra.Command {
	cmd := &cobra.Command{
		Use:   "issues <owner/repo>",
		Short: "Collect issues from a GitHub repository",
		Long:  `Collect all issues from a GitHub repository and store them in a DataNode.`,
		Args:  cobra.ExactArgs(1),
		RunE: func(cmd *cobra.Command, args []string) error {
			repoPath := args[0]
			parts := strings.Split(repoPath, "/")
			if len(parts) != 2 {
				return fmt.Errorf("invalid repository path: %s (must be in the format <owner>/<repo>)", repoPath)
			}
			owner, repo := parts[0], parts[1]
			// Check flag-lookup errors instead of discarding them with `_`,
			// matching the error handling used by the `collect github all` command.
			outputFile, err := cmd.Flags().GetString("output")
			if err != nil {
				return fmt.Errorf("error getting output flag: %w", err)
			}
			format, err := cmd.Flags().GetString("format")
			if err != nil {
				return fmt.Errorf("error getting format flag: %w", err)
			}
			compression, err := cmd.Flags().GetString("compression")
			if err != nil {
				return fmt.Errorf("error getting compression flag: %w", err)
			}
			if format != "datanode" {
				return fmt.Errorf("invalid format: %s (must be 'datanode')", format)
			}
			if compression != "none" && compression != "gz" && compression != "xz" {
				return fmt.Errorf("invalid compression: %s (must be 'none', 'gz', or 'xz')", compression)
			}
			// Keeps interactive sessions informed while the API calls run.
			prompter := ui.NewNonInteractivePrompter(ui.GetVCSQuote)
			prompter.Start()
			defer prompter.Stop()
			client := github.NewGithubClient()
			dn, err := client.GetIssues(cmd.Context(), owner, repo)
			if err != nil {
				return fmt.Errorf("error getting issues: %w", err)
			}
			data, err := dn.ToTar()
			if err != nil {
				return fmt.Errorf("error serializing DataNode: %w", err)
			}
			compressedData, err := compress.Compress(data, compression)
			if err != nil {
				return fmt.Errorf("error compressing data: %w", err)
			}
			// Derive a default filename when --output was not provided.
			if outputFile == "" {
				outputFile = "issues." + format
				if compression != "none" {
					outputFile += "." + compression
				}
			}
			if err := os.WriteFile(outputFile, compressedData, 0644); err != nil {
				return fmt.Errorf("error writing DataNode to file: %w", err)
			}
			fmt.Fprintln(cmd.OutOrStdout(), "Issues saved to", outputFile)
			return nil
		},
	}
	cmd.Flags().String("output", "", "Output file for the DataNode")
	cmd.Flags().String("format", "datanode", "Output format (datanode)")
	cmd.Flags().String("compression", "none", "Compression format (none, gz, or xz)")
	return cmd
}
// init registers the issues subcommand under `borg collect github`.
func init() {
	GetCollectGithubCmd().AddCommand(NewCollectGithubIssuesCmd())
}

View file

@ -0,0 +1,53 @@
package cmd
import (
"bytes"
"context"
"encoding/json"
"net/http"
"net/http/httptest"
"os"
"testing"
"github.com/Snider/Borg/pkg/github"
"github.com/stretchr/testify/assert"
)
// TestCollectGithubIssuesCmd runs the issues command end-to-end against a
// mock GitHub API server and verifies that the output file is written.
func TestCollectGithubIssuesCmd(t *testing.T) {
	// Mock API: one issue whose comments URL points back at this server
	// and returns an empty comment list.
	server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		if r.URL.Path == "/repos/owner/repo/issues" {
			w.Header().Set("Content-Type", "application/json")
			issues := []github.Issue{
				{Number: 1, Title: "Issue 1", CommentsURL: "http://" + r.Host + "/repos/owner/repo/issues/1/comments"},
			}
			json.NewEncoder(w).Encode(issues)
		} else if r.URL.Path == "/repos/owner/repo/issues/1/comments" {
			w.Header().Set("Content-Type", "application/json")
			w.Write([]byte("[]"))
		} else {
			http.NotFound(w, r)
		}
	}))
	defer server.Close()
	// Swap in the test server's HTTP client; restored via defer below.
	// NOTE(review): only the client is replaced, not the API base URL —
	// presumably the github package routes requests through this client; confirm.
	originalNewAuthenticatedClient := github.NewAuthenticatedClient
	github.NewAuthenticatedClient = func(ctx context.Context) *http.Client {
		return server.Client()
	}
	defer func() {
		github.NewAuthenticatedClient = originalNewAuthenticatedClient
	}()
	cmd := NewCollectGithubIssuesCmd()
	var out bytes.Buffer
	cmd.SetOut(&out)
	cmd.SetErr(&out)
	cmd.SetArgs([]string{"owner/repo", "--output", "issues.dat"})
	err := cmd.Execute()
	assert.NoError(t, err)
	// The command must have written the serialized DataNode to disk.
	_, err = os.Stat("issues.dat")
	assert.NoError(t, err)
	os.Remove("issues.dat")
}

84
cmd/collect_github_prs.go Normal file
View file

@ -0,0 +1,84 @@
package cmd
import (
"fmt"
"os"
"strings"
"github.com/Snider/Borg/pkg/compress"
"github.com/Snider/Borg/pkg/github"
"github.com/Snider/Borg/pkg/ui"
"github.com/spf13/cobra"
)
// NewCollectGithubPrsCmd creates a new cobra command for collecting github pull requests.
//
// The command fetches all pull requests for a single <owner>/<repo>,
// serializes the resulting DataNode to a tar archive, optionally compresses
// it, and writes it to --output (default "prs.<format>[.<compression>]").
func NewCollectGithubPrsCmd() *cobra.Command {
	cmd := &cobra.Command{
		Use:   "prs <owner/repo>",
		Short: "Collect pull requests from a GitHub repository",
		Long:  `Collect all pull requests from a GitHub repository and store them in a DataNode.`,
		Args:  cobra.ExactArgs(1),
		RunE: func(cmd *cobra.Command, args []string) error {
			repoPath := args[0]
			parts := strings.Split(repoPath, "/")
			if len(parts) != 2 {
				return fmt.Errorf("invalid repository path: %s (must be in the format <owner>/<repo>)", repoPath)
			}
			owner, repo := parts[0], parts[1]
			// Check flag-lookup errors instead of discarding them with `_`,
			// matching the error handling used by the `collect github all` command.
			outputFile, err := cmd.Flags().GetString("output")
			if err != nil {
				return fmt.Errorf("error getting output flag: %w", err)
			}
			format, err := cmd.Flags().GetString("format")
			if err != nil {
				return fmt.Errorf("error getting format flag: %w", err)
			}
			compression, err := cmd.Flags().GetString("compression")
			if err != nil {
				return fmt.Errorf("error getting compression flag: %w", err)
			}
			if format != "datanode" {
				return fmt.Errorf("invalid format: %s (must be 'datanode')", format)
			}
			if compression != "none" && compression != "gz" && compression != "xz" {
				return fmt.Errorf("invalid compression: %s (must be 'none', 'gz', or 'xz')", compression)
			}
			// Keeps interactive sessions informed while the API calls run.
			prompter := ui.NewNonInteractivePrompter(ui.GetVCSQuote)
			prompter.Start()
			defer prompter.Stop()
			client := github.NewGithubClient()
			dn, err := client.GetPullRequests(cmd.Context(), owner, repo)
			if err != nil {
				return fmt.Errorf("error getting pull requests: %w", err)
			}
			data, err := dn.ToTar()
			if err != nil {
				return fmt.Errorf("error serializing DataNode: %w", err)
			}
			compressedData, err := compress.Compress(data, compression)
			if err != nil {
				return fmt.Errorf("error compressing data: %w", err)
			}
			// Derive a default filename when --output was not provided.
			if outputFile == "" {
				outputFile = "prs." + format
				if compression != "none" {
					outputFile += "." + compression
				}
			}
			if err := os.WriteFile(outputFile, compressedData, 0644); err != nil {
				return fmt.Errorf("error writing DataNode to file: %w", err)
			}
			fmt.Fprintln(cmd.OutOrStdout(), "Pull requests saved to", outputFile)
			return nil
		},
	}
	cmd.Flags().String("output", "", "Output file for the DataNode")
	cmd.Flags().String("format", "datanode", "Output format (datanode)")
	cmd.Flags().String("compression", "none", "Compression format (none, gz, or xz)")
	return cmd
}
// init registers the prs subcommand under `borg collect github`.
func init() {
	GetCollectGithubCmd().AddCommand(NewCollectGithubPrsCmd())
}

View file

@ -0,0 +1,64 @@
package cmd
import (
"bytes"
"context"
"encoding/json"
"net/http"
"net/http/httptest"
"os"
"testing"
"github.com/Snider/Borg/pkg/github"
"github.com/stretchr/testify/assert"
)
// TestCollectGithubPrsCmd runs the prs command end-to-end against a mock
// GitHub API server and verifies that the output file is written.
func TestCollectGithubPrsCmd(t *testing.T) {
	// Mock API: one PR with a diff endpoint and a review-comments endpoint
	// that returns an empty list.
	server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		if r.URL.Path == "/repos/owner/repo/pulls" {
			w.Header().Set("Content-Type", "application/json")
			prs := []github.PullRequest{
				{
					Number: 1, Title: "PR 1",
					DiffURL: "http://" + r.Host + "/repos/owner/repo/pulls/1.diff",
					Links: struct {
						Comments struct{ Href string `json:"href"` } `json:"comments"`
						ReviewComments struct{ Href string `json:"href"` } `json:"review_comments"`
					}{
						ReviewComments: struct{ Href string `json:"href"` }{Href: "http://" + r.Host + "/repos/owner/repo/pulls/1/comments"},
					},
				},
			}
			json.NewEncoder(w).Encode(prs)
		} else if r.URL.Path == "/repos/owner/repo/pulls/1.diff" {
			w.Write([]byte("diff --git a/file b/file"))
		} else if r.URL.Path == "/repos/owner/repo/pulls/1/comments" {
			w.Header().Set("Content-Type", "application/json")
			w.Write([]byte("[]"))
		} else {
			http.NotFound(w, r)
		}
	}))
	defer server.Close()
	// Swap in the test server's HTTP client; restored via defer below.
	// NOTE(review): only the client is replaced, not the API base URL —
	// presumably the github package routes requests through this client; confirm.
	originalNewAuthenticatedClient := github.NewAuthenticatedClient
	github.NewAuthenticatedClient = func(ctx context.Context) *http.Client {
		return server.Client()
	}
	defer func() {
		github.NewAuthenticatedClient = originalNewAuthenticatedClient
	}()
	cmd := NewCollectGithubPrsCmd()
	var out bytes.Buffer
	cmd.SetOut(&out)
	cmd.SetErr(&out)
	cmd.SetArgs([]string{"owner/repo", "--output", "prs.dat"})
	err := cmd.Execute()
	assert.NoError(t, err)
	// The command must have written the serialized DataNode to disk.
	_, err = os.Stat("prs.dat")
	assert.NoError(t, err)
	os.Remove("prs.dat")
}

View file

@ -7,8 +7,8 @@ import (
"os"
"path/filepath"
"forge.lthn.ai/Snider/Borg/pkg/datanode"
borg_github "forge.lthn.ai/Snider/Borg/pkg/github"
"github.com/Snider/Borg/pkg/datanode"
borg_github "github.com/Snider/Borg/pkg/github"
"github.com/google/go-github/v39/github"
"github.com/spf13/cobra"
"golang.org/x/mod/semver"

View file

@ -5,11 +5,11 @@ import (
"io"
"os"
"forge.lthn.ai/Snider/Borg/pkg/compress"
"forge.lthn.ai/Snider/Borg/pkg/tim"
"forge.lthn.ai/Snider/Borg/pkg/trix"
"forge.lthn.ai/Snider/Borg/pkg/ui"
"forge.lthn.ai/Snider/Borg/pkg/vcs"
"github.com/Snider/Borg/pkg/compress"
"github.com/Snider/Borg/pkg/tim"
"github.com/Snider/Borg/pkg/trix"
"github.com/Snider/Borg/pkg/ui"
"github.com/Snider/Borg/pkg/vcs"
"github.com/spf13/cobra"
)

View file

@ -5,8 +5,8 @@ import (
"path/filepath"
"testing"
"forge.lthn.ai/Snider/Borg/pkg/datanode"
"forge.lthn.ai/Snider/Borg/pkg/mocks"
"github.com/Snider/Borg/pkg/datanode"
"github.com/Snider/Borg/pkg/mocks"
)
func TestCollectGithubRepoCmd_Good(t *testing.T) {

View file

@ -3,7 +3,7 @@ package cmd
import (
"fmt"
"forge.lthn.ai/Snider/Borg/pkg/github"
"github.com/Snider/Borg/pkg/github"
"github.com/spf13/cobra"
)

View file

@ -1,581 +0,0 @@
package cmd
import (
"archive/tar"
"bytes"
"fmt"
"io"
"io/fs"
"os"
"path/filepath"
"strings"
"sync"
"forge.lthn.ai/Snider/Borg/pkg/compress"
"forge.lthn.ai/Snider/Borg/pkg/datanode"
"forge.lthn.ai/Snider/Borg/pkg/tim"
"forge.lthn.ai/Snider/Borg/pkg/trix"
"forge.lthn.ai/Snider/Borg/pkg/ui"
"github.com/spf13/cobra"
)
// CollectLocalCmd is the `collect local` command. It embeds cobra.Command
// directly and adds no fields of its own.
type CollectLocalCmd struct {
	cobra.Command
}
// NewCollectLocalCmd creates a new collect local command.
// It defines the CLI surface (flags, usage, examples) and delegates all
// validation and collection work to CollectLocal.
func NewCollectLocalCmd() *CollectLocalCmd {
	c := &CollectLocalCmd{}
	c.Command = cobra.Command{
		Use:   "local [directory]",
		Short: "Collect files from a local directory",
		Long: `Collect local files into a portable container.
For STIM format, uses streaming I/O memory usage is constant
(~2 MiB) regardless of input directory size. Other formats
(datanode, tim, trix) load files into memory.
Examples:
borg collect local
borg collect local ./src
borg collect local /path/to/project --output project.tar
borg collect local . --format stim --password secret
borg collect local . --exclude "*.log" --exclude "node_modules"`,
		Args: cobra.MaximumNArgs(1),
		RunE: func(cmd *cobra.Command, args []string) error {
			// Default to the current directory when no argument is given.
			directory := "."
			if len(args) > 0 {
				directory = args[0]
			}
			// Flag lookup errors are deliberately ignored here; the flags are
			// all declared below, so lookups cannot fail by name.
			outputFile, _ := cmd.Flags().GetString("output")
			format, _ := cmd.Flags().GetString("format")
			compression, _ := cmd.Flags().GetString("compression")
			password, _ := cmd.Flags().GetString("password")
			excludes, _ := cmd.Flags().GetStringSlice("exclude")
			includeHidden, _ := cmd.Flags().GetBool("hidden")
			respectGitignore, _ := cmd.Flags().GetBool("gitignore")
			progress := ProgressFromCmd(cmd)
			// CollectLocal validates inputs and returns the final output path.
			finalPath, err := CollectLocal(directory, outputFile, format, compression, password, excludes, includeHidden, respectGitignore, progress)
			if err != nil {
				return err
			}
			fmt.Fprintln(cmd.OutOrStdout(), "Files saved to", finalPath)
			return nil
		},
	}
	c.Flags().String("output", "", "Output file for the DataNode")
	c.Flags().String("format", "datanode", "Output format (datanode, tim, trix, or stim)")
	c.Flags().String("compression", "none", "Compression format (none, gz, or xz)")
	c.Flags().String("password", "", "Password for encryption (required for stim/trix format)")
	c.Flags().StringSlice("exclude", nil, "Patterns to exclude (can be specified multiple times)")
	c.Flags().Bool("hidden", false, "Include hidden files and directories")
	c.Flags().Bool("gitignore", true, "Respect .gitignore files (default: true)")
	return c
}
// init registers the local collector under the `collect` command.
func init() {
	collectCmd.AddCommand(&NewCollectLocalCmd().Command)
}
// CollectLocal collects files from a local directory into a DataNode and
// writes the serialized (and optionally compressed) container to disk.
//
// directory is walked recursively; hidden files, gitignore matches, and
// exclude-pattern matches are skipped according to the flags. format selects
// the container ("datanode", "tim", "trix", or "stim"); stim and trix require
// a non-empty password. compression is "none", "gz", or "xz". If outputFile
// is empty a name is derived from the directory basename. Returns the final
// output path, or an error.
func CollectLocal(directory string, outputFile string, format string, compression string, password string, excludes []string, includeHidden bool, respectGitignore bool, progress ui.Progress) (string, error) {
	// Validate format
	if format != "datanode" && format != "tim" && format != "trix" && format != "stim" {
		return "", fmt.Errorf("invalid format: %s (must be 'datanode', 'tim', 'trix', or 'stim')", format)
	}
	if (format == "stim" || format == "trix") && password == "" {
		return "", fmt.Errorf("password is required for %s format", format)
	}
	if compression != "none" && compression != "gz" && compression != "xz" {
		return "", fmt.Errorf("invalid compression: %s (must be 'none', 'gz', or 'xz')", compression)
	}
	// Resolve directory path
	absDir, err := filepath.Abs(directory)
	if err != nil {
		return "", fmt.Errorf("error resolving directory path: %w", err)
	}
	info, err := os.Stat(absDir)
	if err != nil {
		return "", fmt.Errorf("error accessing directory: %w", err)
	}
	if !info.IsDir() {
		return "", fmt.Errorf("not a directory: %s", absDir)
	}
	// Use streaming pipeline for STIM v2 format and return early — the
	// in-memory walk below never runs for stim.
	if format == "stim" {
		if outputFile == "" {
			baseName := filepath.Base(absDir)
			if baseName == "." || baseName == "/" {
				baseName = "local"
			}
			outputFile = baseName + ".stim"
		}
		if err := CollectLocalStreaming(absDir, outputFile, compression, password); err != nil {
			return "", err
		}
		return outputFile, nil
	}
	// Load gitignore patterns if enabled
	var gitignorePatterns []string
	if respectGitignore {
		gitignorePatterns = loadGitignore(absDir)
	}
	// Create DataNode and collect files
	dn := datanode.New()
	var fileCount int
	progress.Start("collecting " + directory)
	err = filepath.WalkDir(absDir, func(path string, d fs.DirEntry, err error) error {
		if err != nil {
			return err
		}
		// Get relative path
		relPath, err := filepath.Rel(absDir, path)
		if err != nil {
			return err
		}
		// Skip root
		if relPath == "." {
			return nil
		}
		// Skip hidden files/dirs unless explicitly included; SkipDir prunes
		// the whole subtree for hidden directories.
		if !includeHidden && isHidden(relPath) {
			if d.IsDir() {
				return filepath.SkipDir
			}
			return nil
		}
		// Check gitignore patterns
		if respectGitignore && matchesGitignore(relPath, d.IsDir(), gitignorePatterns) {
			if d.IsDir() {
				return filepath.SkipDir
			}
			return nil
		}
		// Check exclude patterns
		if matchesExclude(relPath, excludes) {
			if d.IsDir() {
				return filepath.SkipDir
			}
			return nil
		}
		// Skip directories (they're implicit in DataNode)
		if d.IsDir() {
			return nil
		}
		// Read file content
		content, err := os.ReadFile(path)
		if err != nil {
			return fmt.Errorf("error reading %s: %w", relPath, err)
		}
		// Add to DataNode with forward slashes (tar convention)
		dn.AddData(filepath.ToSlash(relPath), content)
		fileCount++
		progress.Update(int64(fileCount), 0)
		return nil
	})
	if err != nil {
		return "", fmt.Errorf("error walking directory: %w", err)
	}
	if fileCount == 0 {
		return "", fmt.Errorf("no files found in %s", directory)
	}
	progress.Finish(fmt.Sprintf("collected %d files", fileCount))
	// Convert to output format
	var data []byte
	if format == "tim" {
		t, err := tim.FromDataNode(dn)
		if err != nil {
			return "", fmt.Errorf("error creating tim: %w", err)
		}
		data, err = t.ToTar()
		if err != nil {
			return "", fmt.Errorf("error serializing tim: %w", err)
		}
	} else if format == "stim" {
		// NOTE(review): unreachable — format == "stim" returns via the
		// streaming path above. Consider removing this branch.
		t, err := tim.FromDataNode(dn)
		if err != nil {
			return "", fmt.Errorf("error creating tim: %w", err)
		}
		data, err = t.ToSigil(password)
		if err != nil {
			return "", fmt.Errorf("error encrypting stim: %w", err)
		}
	} else if format == "trix" {
		data, err = trix.ToTrix(dn, password)
		if err != nil {
			return "", fmt.Errorf("error serializing trix: %w", err)
		}
	} else {
		// Default "datanode" format: a plain tar of the collected files.
		data, err = dn.ToTar()
		if err != nil {
			return "", fmt.Errorf("error serializing DataNode: %w", err)
		}
	}
	// Apply compression
	compressedData, err := compress.Compress(data, compression)
	if err != nil {
		return "", fmt.Errorf("error compressing data: %w", err)
	}
	// Determine output filename
	if outputFile == "" {
		baseName := filepath.Base(absDir)
		if baseName == "." || baseName == "/" {
			baseName = "local"
		}
		outputFile = baseName + "." + format
		if compression != "none" {
			outputFile += "." + compression
		}
	}
	err = os.WriteFile(outputFile, compressedData, 0644)
	if err != nil {
		return "", fmt.Errorf("error writing output file: %w", err)
	}
	return outputFile, nil
}
// isHidden reports whether any component of path begins with a dot,
// i.e. the path names a hidden file or lives under a hidden directory.
func isHidden(path string) bool {
	for _, segment := range strings.Split(filepath.ToSlash(path), "/") {
		if len(segment) > 0 && segment[0] == '.' {
			return true
		}
	}
	return false
}
// loadGitignore reads the .gitignore file in dir and returns its patterns,
// skipping blank lines and '#' comments. It returns nil when the file is
// missing or unreadable.
func loadGitignore(dir string) []string {
	content, err := os.ReadFile(filepath.Join(dir, ".gitignore"))
	if err != nil {
		return nil
	}
	var patterns []string
	for _, raw := range strings.Split(string(content), "\n") {
		line := strings.TrimSpace(raw)
		// Keep only real patterns: drop empties and comments.
		if line == "" || strings.HasPrefix(line, "#") {
			continue
		}
		patterns = append(patterns, line)
	}
	return patterns
}
// matchesGitignore reports whether path matches any gitignore-style pattern.
// Directory-only patterns ("dir/") match only when isDir is true; negated
// patterns ("!pat") are skipped; "**" globs are approximated by collapsing
// them to a single "*".
func matchesGitignore(path string, isDir bool, patterns []string) bool {
	base := filepath.Base(path)
	for _, raw := range patterns {
		pattern := raw
		// A trailing slash restricts the pattern to directories.
		if strings.HasSuffix(pattern, "/") {
			if !isDir {
				continue
			}
			pattern = strings.TrimSuffix(pattern, "/")
		}
		// Negation is not supported; treat negated patterns as non-matching.
		if strings.HasPrefix(pattern, "!") {
			continue
		}
		if ok, _ := filepath.Match(pattern, base); ok {
			return true
		}
		if ok, _ := filepath.Match(pattern, path); ok {
			return true
		}
		// Simplified "**" handling: collapse to "*" and retry on the full path.
		if strings.Contains(pattern, "**") {
			if ok, _ := filepath.Match(strings.ReplaceAll(pattern, "**", "*"), path); ok {
				return true
			}
		}
	}
	return false
}
// matchesExclude reports whether path matches any of the shell-style exclude
// patterns, tested against both the basename and the full relative path.
func matchesExclude(path string, excludes []string) bool {
	base := filepath.Base(path)
	for _, pattern := range excludes {
		if ok, _ := filepath.Match(pattern, base); ok {
			return true
		}
		if ok, _ := filepath.Match(pattern, path); ok {
			return true
		}
	}
	return false
}
// CollectLocalStreaming collects files from a local directory using a streaming
// pipeline: walk -> tar -> compress -> encrypt -> file.
// The encryption runs in a goroutine, consuming from an io.Pipe that the
// tar/compress writes feed into synchronously.
//
// dir is the source directory, output the destination file path, compression
// one of the values accepted by compress.NewCompressWriter, and password the
// encryption key for tim.StreamEncrypt. On any failure the partially written
// output file is removed.
func CollectLocalStreaming(dir, output, compression, password string) error {
	// Resolve to absolute path
	absDir, err := filepath.Abs(dir)
	if err != nil {
		return fmt.Errorf("error resolving directory path: %w", err)
	}
	// Validate directory exists
	info, err := os.Stat(absDir)
	if err != nil {
		return fmt.Errorf("error accessing directory: %w", err)
	}
	if !info.IsDir() {
		return fmt.Errorf("not a directory: %s", absDir)
	}
	// Create output file
	outFile, err := os.Create(output)
	if err != nil {
		return fmt.Errorf("error creating output file: %w", err)
	}
	// cleanup removes partial output on error
	cleanup := func() {
		outFile.Close()
		os.Remove(output)
	}
	// Build streaming pipeline:
	// tar.Writer -> compressWriter -> pipeWriter -> pipeReader -> StreamEncrypt -> outFile
	pr, pw := io.Pipe()
	// Start encryption goroutine. encErr is only read after wg.Wait(), so the
	// write here is safely ordered before every read.
	var encErr error
	var wg sync.WaitGroup
	wg.Add(1)
	go func() {
		defer wg.Done()
		encErr = tim.StreamEncrypt(pr, outFile, password)
	}()
	// Create compression writer wrapping the pipe writer
	compWriter, err := compress.NewCompressWriter(pw, compression)
	if err != nil {
		pw.Close()
		wg.Wait()
		cleanup()
		return fmt.Errorf("error creating compression writer: %w", err)
	}
	// Create tar writer wrapping the compression writer
	tw := tar.NewWriter(compWriter)
	// Walk directory and write tar entries
	walkErr := filepath.WalkDir(absDir, func(path string, d fs.DirEntry, err error) error {
		if err != nil {
			return err
		}
		// Get relative path
		relPath, err := filepath.Rel(absDir, path)
		if err != nil {
			return err
		}
		// Skip root
		if relPath == "." {
			return nil
		}
		// Normalize to forward slashes for tar
		relPath = filepath.ToSlash(relPath)
		// Check if entry is a symlink using Lstat
		linfo, err := os.Lstat(path)
		if err != nil {
			return err
		}
		isSymlink := linfo.Mode()&fs.ModeSymlink != 0
		if isSymlink {
			// Read symlink target
			linkTarget, err := os.Readlink(path)
			if err != nil {
				return err
			}
			// Resolve to check if target exists
			absTarget := linkTarget
			if !filepath.IsAbs(absTarget) {
				absTarget = filepath.Join(filepath.Dir(path), linkTarget)
			}
			_, statErr := os.Stat(absTarget)
			if statErr != nil {
				// Broken symlink - skip silently
				return nil
			}
			// Write valid symlink as tar entry
			hdr := &tar.Header{
				Typeflag: tar.TypeSymlink,
				Name:     relPath,
				Linkname: linkTarget,
				Mode:     0777,
			}
			return tw.WriteHeader(hdr)
		}
		if d.IsDir() {
			// Write directory header
			hdr := &tar.Header{
				Typeflag: tar.TypeDir,
				Name:     relPath + "/",
				Mode:     0755,
			}
			return tw.WriteHeader(hdr)
		}
		// Regular file: write header + content. Note the original file mode
		// is not preserved; all files are archived as 0644.
		finfo, err := d.Info()
		if err != nil {
			return err
		}
		hdr := &tar.Header{
			Name: relPath,
			Mode: 0644,
			Size: finfo.Size(),
		}
		if err := tw.WriteHeader(hdr); err != nil {
			return err
		}
		f, err := os.Open(path)
		if err != nil {
			return fmt.Errorf("error opening %s: %w", relPath, err)
		}
		defer f.Close()
		if _, err := io.Copy(tw, f); err != nil {
			return fmt.Errorf("error streaming %s: %w", relPath, err)
		}
		return nil
	})
	// Close pipeline layers in order: tar -> compress -> pipe
	// We must close even on error to unblock the encryption goroutine.
	twCloseErr := tw.Close()
	compCloseErr := compWriter.Close()
	if walkErr != nil {
		pw.CloseWithError(walkErr)
		wg.Wait()
		cleanup()
		return fmt.Errorf("error walking directory: %w", walkErr)
	}
	if twCloseErr != nil {
		pw.CloseWithError(twCloseErr)
		wg.Wait()
		cleanup()
		return fmt.Errorf("error closing tar writer: %w", twCloseErr)
	}
	if compCloseErr != nil {
		pw.CloseWithError(compCloseErr)
		wg.Wait()
		cleanup()
		return fmt.Errorf("error closing compression writer: %w", compCloseErr)
	}
	// Signal EOF to encryption goroutine
	pw.Close()
	// Wait for encryption to finish
	wg.Wait()
	if encErr != nil {
		cleanup()
		return fmt.Errorf("error encrypting data: %w", encErr)
	}
	// Close output file
	if err := outFile.Close(); err != nil {
		os.Remove(output)
		return fmt.Errorf("error closing output file: %w", err)
	}
	return nil
}
// DecryptStimV2 reverses the streaming collection pipeline for a STIM v2
// archive: the file is opened, StreamDecrypt recovers the compressed tar
// stream, the stream is decompressed, and the tar bytes are parsed back
// into a DataNode.
func DecryptStimV2(path, password string) (*datanode.DataNode, error) {
	src, err := os.Open(path)
	if err != nil {
		return nil, fmt.Errorf("error opening file: %w", err)
	}
	defer src.Close()

	// Stage 1: chunked AEAD decryption into an in-memory buffer.
	var plain bytes.Buffer
	if err := tim.StreamDecrypt(src, &plain, password); err != nil {
		return nil, fmt.Errorf("error decrypting: %w", err)
	}

	// Stage 2: undo the compression layer.
	tarBytes, err := compress.Decompress(plain.Bytes())
	if err != nil {
		return nil, fmt.Errorf("error decompressing: %w", err)
	}

	// Stage 3: materialise the tar archive as a DataNode.
	node, err := datanode.FromTar(tarBytes)
	if err != nil {
		return nil, fmt.Errorf("error parsing tar: %w", err)
	}
	return node, nil
}

View file

@ -1,161 +0,0 @@
package cmd
import (
	"io"
	"os"
	"path/filepath"
	"testing"
)
// TestCollectLocalStreaming_Good verifies that collecting a small directory
// tree produces a non-empty encrypted archive file on disk.
func TestCollectLocalStreaming_Good(t *testing.T) {
	srcDir := t.TempDir()
	outDir := t.TempDir()

	// Lay out a small tree: one root-level file plus one nested file.
	if err := os.MkdirAll(filepath.Join(srcDir, "subdir"), 0755); err != nil {
		t.Fatalf("failed to create subdir: %v", err)
	}
	for name, content := range map[string]string{
		"hello.txt":        "hello world",
		"subdir/nested.go": "package main\n",
	} {
		if err := os.WriteFile(filepath.Join(srcDir, name), []byte(content), 0644); err != nil {
			t.Fatalf("failed to write %s: %v", name, err)
		}
	}

	output := filepath.Join(outDir, "test.stim")
	if err := CollectLocalStreaming(srcDir, output, "gz", "test-password"); err != nil {
		t.Fatalf("CollectLocalStreaming() error = %v", err)
	}

	// The archive must exist and contain data.
	info, err := os.Stat(output)
	if err != nil {
		t.Fatalf("output file does not exist: %v", err)
	}
	if info.Size() == 0 {
		t.Fatal("output file is empty")
	}
}
// TestCollectLocalStreaming_Decrypt_Good round-trips a known directory tree:
// collect it into an encrypted archive, decrypt it back into a DataNode, and
// verify every file is present with identical content.
func TestCollectLocalStreaming_Decrypt_Good(t *testing.T) {
	// Create a temp directory with known files
	srcDir := t.TempDir()
	outDir := t.TempDir()
	subDir := filepath.Join(srcDir, "pkg")
	if err := os.MkdirAll(subDir, 0755); err != nil {
		t.Fatalf("failed to create subdir: %v", err)
	}
	expectedFiles := map[string]string{
		"README.md":   "# Test Project\n",
		"pkg/main.go": "package main\n\nfunc main() {}\n",
	}
	for name, content := range expectedFiles {
		path := filepath.Join(srcDir, name)
		if err := os.WriteFile(path, []byte(content), 0644); err != nil {
			t.Fatalf("failed to write %s: %v", name, err)
		}
	}
	password := "decrypt-test-pw"
	output := filepath.Join(outDir, "roundtrip.stim")
	// Collect
	err := CollectLocalStreaming(srcDir, output, "gz", password)
	if err != nil {
		t.Fatalf("CollectLocalStreaming() error = %v", err)
	}
	// Decrypt
	dn, err := DecryptStimV2(output, password)
	if err != nil {
		t.Fatalf("DecryptStimV2() error = %v", err)
	}
	// Verify each expected file exists in the DataNode and matches exactly.
	for name, wantContent := range expectedFiles {
		f, err := dn.Open(name)
		if err != nil {
			t.Errorf("file %q not found in DataNode: %v", name, err)
			continue
		}
		// io.ReadAll instead of a single fixed-size Read: one Read call is
		// not guaranteed to return the whole file, and its error was
		// previously ignored, which could mask a truncated round-trip.
		data, rerr := io.ReadAll(f)
		f.Close()
		if rerr != nil {
			t.Errorf("reading %q: %v", name, rerr)
			continue
		}
		if got := string(data); got != wantContent {
			t.Errorf("file %q content mismatch:\n got: %q\n want: %q", name, got, wantContent)
		}
	}
}
// TestCollectLocalStreaming_BrokenSymlink_Good verifies that a dangling
// symlink is skipped without aborting the collection, while regular files
// are still archived and survive the round-trip.
func TestCollectLocalStreaming_BrokenSymlink_Good(t *testing.T) {
	srcDir := t.TempDir()
	outDir := t.TempDir()

	// One real file that must survive.
	if err := os.WriteFile(filepath.Join(srcDir, "real.txt"), []byte("I exist"), 0644); err != nil {
		t.Fatalf("failed to write real.txt: %v", err)
	}
	// One symlink whose target does not exist.
	if err := os.Symlink("/nonexistent/target/file", filepath.Join(srcDir, "broken-link")); err != nil {
		t.Fatalf("failed to create broken symlink: %v", err)
	}

	output := filepath.Join(outDir, "symlink.stim")
	if err := CollectLocalStreaming(srcDir, output, "none", "sym-password"); err != nil {
		t.Fatalf("CollectLocalStreaming() should skip broken symlinks, got error = %v", err)
	}

	// The archive must exist and contain data.
	info, err := os.Stat(output)
	if err != nil {
		t.Fatalf("output file does not exist: %v", err)
	}
	if info.Size() == 0 {
		t.Fatal("output file is empty")
	}

	// Decrypt and confirm what was — and was not — collected.
	dn, err := DecryptStimV2(output, "sym-password")
	if err != nil {
		t.Fatalf("DecryptStimV2() error = %v", err)
	}
	if _, statErr := dn.Stat("real.txt"); statErr != nil {
		t.Error("expected real.txt in DataNode but it's missing")
	}
	if exists, _ := dn.Exists("broken-link"); exists {
		t.Error("broken symlink should have been skipped but was found in DataNode")
	}
}
// TestCollectLocalStreaming_Bad verifies that a nonexistent source directory
// fails fast and leaves no partial archive on disk.
func TestCollectLocalStreaming_Bad(t *testing.T) {
	output := filepath.Join(t.TempDir(), "should-not-exist.stim")
	if err := CollectLocalStreaming("/nonexistent/path/that/does/not/exist", output, "none", "password"); err == nil {
		t.Fatal("expected error for nonexistent directory, got nil")
	}
	// A failed collection must clean up any partially written output.
	if _, statErr := os.Stat(output); statErr == nil {
		t.Error("partial output file should have been cleaned up")
	}
}

View file

@ -4,11 +4,11 @@ import (
"fmt"
"os"
"forge.lthn.ai/Snider/Borg/pkg/compress"
"forge.lthn.ai/Snider/Borg/pkg/pwa"
"forge.lthn.ai/Snider/Borg/pkg/tim"
"forge.lthn.ai/Snider/Borg/pkg/trix"
"forge.lthn.ai/Snider/Borg/pkg/ui"
"github.com/Snider/Borg/pkg/compress"
"github.com/Snider/Borg/pkg/pwa"
"github.com/Snider/Borg/pkg/tim"
"github.com/Snider/Borg/pkg/trix"
"github.com/Snider/Borg/pkg/ui"
"github.com/spf13/cobra"
)

View file

@ -5,11 +5,11 @@ import (
"os"
"github.com/schollz/progressbar/v3"
"forge.lthn.ai/Snider/Borg/pkg/compress"
"forge.lthn.ai/Snider/Borg/pkg/tim"
"forge.lthn.ai/Snider/Borg/pkg/trix"
"forge.lthn.ai/Snider/Borg/pkg/ui"
"forge.lthn.ai/Snider/Borg/pkg/website"
"github.com/Snider/Borg/pkg/compress"
"github.com/Snider/Borg/pkg/tim"
"github.com/Snider/Borg/pkg/trix"
"github.com/Snider/Borg/pkg/ui"
"github.com/Snider/Borg/pkg/website"
"github.com/spf13/cobra"
)

View file

@ -6,8 +6,8 @@ import (
"strings"
"testing"
"forge.lthn.ai/Snider/Borg/pkg/datanode"
"forge.lthn.ai/Snider/Borg/pkg/website"
"github.com/Snider/Borg/pkg/datanode"
"github.com/Snider/Borg/pkg/website"
"github.com/schollz/progressbar/v3"
)

View file

@ -5,7 +5,7 @@ import (
"os"
"strings"
"forge.lthn.ai/Snider/Borg/pkg/tim"
"github.com/Snider/Borg/pkg/tim"
"github.com/spf13/cobra"
)

View file

@ -5,8 +5,8 @@ import (
"os"
"path/filepath"
"forge.lthn.ai/Snider/Borg/pkg/console"
"forge.lthn.ai/Snider/Borg/pkg/tim"
"github.com/Snider/Borg/pkg/console"
"github.com/Snider/Borg/pkg/tim"
"github.com/spf13/cobra"
)

View file

@ -1,17 +0,0 @@
package cmd
import (
"os"
"forge.lthn.ai/Snider/Borg/pkg/ui"
"github.com/spf13/cobra"
)
// ProgressFromCmd returns a Progress based on --quiet flag and TTY detection:
// quiet mode yields a log-only reporter on stderr, otherwise the default
// (interactive where possible) implementation is used.
func ProgressFromCmd(cmd *cobra.Command) ui.Progress {
	if quiet, _ := cmd.Flags().GetBool("quiet"); quiet {
		return ui.NewQuietProgress(os.Stderr)
	}
	return ui.DefaultProgress()
}

View file

@ -1,28 +0,0 @@
package cmd
import (
"testing"
"github.com/spf13/cobra"
)
// TestProgressFromCmd_Good checks the default (non-quiet) path returns a
// usable Progress implementation.
func TestProgressFromCmd_Good(t *testing.T) {
	c := &cobra.Command{}
	c.PersistentFlags().BoolP("quiet", "q", false, "")
	if p := ProgressFromCmd(c); p == nil {
		t.Fatal("expected non-nil Progress")
	}
}
// TestProgressFromCmd_Quiet_Good checks that an explicitly set --quiet flag
// still yields a usable Progress implementation.
func TestProgressFromCmd_Quiet_Good(t *testing.T) {
	c := &cobra.Command{}
	c.PersistentFlags().BoolP("quiet", "q", true, "")
	_ = c.PersistentFlags().Set("quiet", "true")
	if p := ProgressFromCmd(c); p == nil {
		t.Fatal("expected non-nil Progress")
	}
}

View file

@ -15,8 +15,8 @@ import (
"sync"
"time"
"forge.lthn.ai/Snider/Borg/pkg/player"
"forge.lthn.ai/Snider/Borg/pkg/smsg"
"github.com/Snider/Borg/pkg/player"
"github.com/Snider/Borg/pkg/smsg"
"github.com/wailsapp/wails/v2"
"github.com/wailsapp/wails/v2/pkg/options"
"github.com/wailsapp/wails/v2/pkg/options/assetserver"

View file

@ -6,7 +6,7 @@ import (
"fmt"
"os"
"forge.lthn.ai/Snider/Borg/pkg/player"
"github.com/Snider/Borg/pkg/player"
"github.com/spf13/cobra"
)

View file

@ -5,9 +5,9 @@ import (
"os"
"strings"
"forge.lthn.ai/Snider/Borg/pkg/tim"
"forge.lthn.ai/Snider/Borg/pkg/trix"
trixsdk "forge.lthn.ai/Snider/Enchantrix/pkg/trix"
"github.com/Snider/Borg/pkg/tim"
"github.com/Snider/Borg/pkg/trix"
trixsdk "github.com/Snider/Enchantrix/pkg/trix"
"github.com/spf13/cobra"
)

View file

@ -5,8 +5,8 @@ import (
"path/filepath"
"testing"
"forge.lthn.ai/Snider/Borg/pkg/datanode"
"forge.lthn.ai/Snider/Borg/pkg/trix"
"github.com/Snider/Borg/pkg/datanode"
"github.com/Snider/Borg/pkg/trix"
)
func TestDecodeCmd(t *testing.T) {

View file

@ -6,7 +6,7 @@ import (
"fmt"
"os"
"forge.lthn.ai/Snider/Borg/pkg/smsg"
"github.com/Snider/Borg/pkg/smsg"
)
func main() {

View file

@ -6,7 +6,7 @@ import (
"os"
"strings"
trixsdk "forge.lthn.ai/Snider/Enchantrix/pkg/trix"
trixsdk "github.com/Snider/Enchantrix/pkg/trix"
"github.com/spf13/cobra"
)

View file

@ -1,194 +0,0 @@
package cmd
import (
"bytes"
"io"
"os"
"path/filepath"
"testing"
)
// TestFullPipeline_Good exercises the complete streaming pipeline end-to-end
// with realistic directory contents including nested dirs, a large file that
// crosses the AEAD block boundary, valid and broken symlinks, and a hidden file.
// Each compression mode (none, gz, xz) is tested as a subtest.
func TestFullPipeline_Good(t *testing.T) {
	if testing.Short() {
		t.Skip("skipping integration test in short mode")
	}
	// Build a realistic source directory.
	srcDir := t.TempDir()
	// Regular files at root level.
	writeFile(t, srcDir, "readme.md", "# My Project\n\nA description.\n")
	writeFile(t, srcDir, "config.json", `{"version":"1.0","debug":false}`)
	// Nested directories with source code.
	mkdirAll(t, srcDir, "src")
	mkdirAll(t, srcDir, "src/pkg")
	writeFile(t, srcDir, "src/main.go", "package main\n\nimport \"fmt\"\n\nfunc main() {\n\tfmt.Println(\"hello\")\n}\n")
	writeFile(t, srcDir, "src/pkg/lib.go", "package pkg\n\n// Lib is a library function.\nfunc Lib() string { return \"lib\" }\n")
	// Large file: 1 MiB + 1 byte — crosses the 64 KiB block boundary used by
	// the chunked AEAD streaming encryption. Fill with a deterministic pattern
	// so we can verify content after round-trip.
	const largeSize = 1024*1024 + 1
	largeContent := make([]byte, largeSize)
	for i := range largeContent {
		largeContent[i] = byte(i % 251) // prime mod for non-trivial pattern
	}
	writeFileBytes(t, srcDir, "large.bin", largeContent)
	// Valid symlink pointing at a relative target.
	if err := os.Symlink("readme.md", filepath.Join(srcDir, "link-to-readme")); err != nil {
		t.Fatalf("failed to create valid symlink: %v", err)
	}
	// Broken symlink pointing at a nonexistent absolute path.
	if err := os.Symlink("/nonexistent/target", filepath.Join(srcDir, "broken-link")); err != nil {
		t.Fatalf("failed to create broken symlink: %v", err)
	}
	// Hidden file (dot-prefixed).
	writeFile(t, srcDir, ".hidden", "secret stuff\n")
	// Run each compression mode as a subtest. No `comp := comp` shadow is
	// needed: go.mod declares Go >= 1.22, where each loop iteration gets its
	// own variable.
	modes := []string{"none", "gz", "xz"}
	for _, comp := range modes {
		t.Run("compression="+comp, func(t *testing.T) {
			outDir := t.TempDir()
			outFile := filepath.Join(outDir, "pipeline-"+comp+".stim")
			password := "integration-test-pw-" + comp
			// Step 1: Collect (walk -> tar -> compress -> encrypt -> file).
			if err := CollectLocalStreaming(srcDir, outFile, comp, password); err != nil {
				t.Fatalf("CollectLocalStreaming(%q) error = %v", comp, err)
			}
			// Step 2: Verify output exists and is non-empty.
			info, err := os.Stat(outFile)
			if err != nil {
				t.Fatalf("output file does not exist: %v", err)
			}
			if info.Size() == 0 {
				t.Fatal("output file is empty")
			}
			// Step 3: Decrypt back into a DataNode.
			dn, err := DecryptStimV2(outFile, password)
			if err != nil {
				t.Fatalf("DecryptStimV2() error = %v", err)
			}
			// Step 4: Verify all regular files exist in the DataNode.
			expectedFiles := []string{
				"readme.md",
				"config.json",
				"src/main.go",
				"src/pkg/lib.go",
				"large.bin",
				".hidden",
			}
			for _, name := range expectedFiles {
				exists, eerr := dn.Exists(name)
				if eerr != nil {
					t.Errorf("Exists(%q) error = %v", name, eerr)
					continue
				}
				if !exists {
					t.Errorf("expected file %q in DataNode but it is missing", name)
				}
			}
			// Verify the valid symlink was included.
			linkExists, _ := dn.Exists("link-to-readme")
			if !linkExists {
				t.Error("expected symlink link-to-readme in DataNode but it is missing")
			}
			// Step 5: Verify large file content survived the round-trip.
			f, err := dn.Open("large.bin")
			if err != nil {
				t.Fatalf("Open(large.bin) error = %v", err)
			}
			defer f.Close()
			allData, err := io.ReadAll(f)
			if err != nil {
				t.Fatalf("reading large.bin: %v", err)
			}
			if len(allData) != largeSize {
				t.Errorf("large.bin size = %d, want %d", len(allData), largeSize)
			}
			// bytes.Equal fully verifies the content; the previous first-byte
			// spot check (allData[0] != byte(0%251)) was redundant and removed.
			if !bytes.Equal(allData, largeContent) {
				t.Error("large.bin content does not match original after round-trip")
			}
			// Step 6: Verify broken symlink was skipped.
			brokenExists, _ := dn.Exists("broken-link")
			if brokenExists {
				t.Error("broken symlink should have been skipped but was found in DataNode")
			}
		})
	}
}
// TestFullPipeline_WrongPassword_Bad encrypts with one password and attempts
// to decrypt with a different password, verifying that an error is returned.
func TestFullPipeline_WrongPassword_Bad(t *testing.T) {
	if testing.Short() {
		t.Skip("skipping integration test in short mode")
	}
	srcDir := t.TempDir()
	outDir := t.TempDir()
	writeFile(t, srcDir, "secret.txt", "this is confidential\n")
	outFile := filepath.Join(outDir, "wrong-pw.stim")

	// Encrypt with the correct password.
	if err := CollectLocalStreaming(srcDir, outFile, "none", "correct-password"); err != nil {
		t.Fatalf("CollectLocalStreaming() error = %v", err)
	}

	// Decryption with a different password must fail.
	if _, err := DecryptStimV2(outFile, "wrong-password"); err == nil {
		t.Fatal("expected error when decrypting with wrong password, got nil")
	}
}
// --- helpers ---
func writeFile(t *testing.T, base, rel, content string) {
t.Helper()
path := filepath.Join(base, rel)
if err := os.WriteFile(path, []byte(content), 0644); err != nil {
t.Fatalf("failed to write %s: %v", rel, err)
}
}
func writeFileBytes(t *testing.T, base, rel string, data []byte) {
t.Helper()
path := filepath.Join(base, rel)
if err := os.WriteFile(path, data, 0644); err != nil {
t.Fatalf("failed to write %s: %v", rel, err)
}
}
func mkdirAll(t *testing.T, base, rel string) {
t.Helper()
path := filepath.Join(base, rel)
if err := os.MkdirAll(path, 0755); err != nil {
t.Fatalf("failed to mkdir %s: %v", rel, err)
}
}

View file

@ -21,7 +21,7 @@ import (
"path/filepath"
"strings"
"forge.lthn.ai/Snider/Borg/pkg/smsg"
"github.com/Snider/Borg/pkg/smsg"
)
// Preset defines a quality level for transcoding

View file

@ -8,7 +8,7 @@ import (
"os"
"path/filepath"
"forge.lthn.ai/Snider/Borg/pkg/smsg"
"github.com/Snider/Borg/pkg/smsg"
)
func main() {

View file

@ -8,7 +8,7 @@ import (
"os"
"path/filepath"
"forge.lthn.ai/Snider/Borg/pkg/smsg"
"github.com/Snider/Borg/pkg/smsg"
)
func main() {

View file

@ -16,7 +16,6 @@ packaging their contents into a single file, and managing the data within.`,
}
rootCmd.PersistentFlags().BoolP("verbose", "v", false, "Enable verbose logging")
rootCmd.PersistentFlags().BoolP("quiet", "q", false, "Suppress non-error output")
return rootCmd
}

View file

@ -4,7 +4,7 @@ import (
"os"
"strings"
"forge.lthn.ai/Snider/Borg/pkg/tim"
"github.com/Snider/Borg/pkg/tim"
"github.com/spf13/cobra"
)

View file

@ -7,7 +7,7 @@ import (
"path/filepath"
"testing"
"forge.lthn.ai/Snider/Borg/pkg/tim"
"github.com/Snider/Borg/pkg/tim"
)
func TestRunCmd_Good(t *testing.T) {

View file

@ -6,9 +6,9 @@ import (
"os"
"strings"
"forge.lthn.ai/Snider/Borg/pkg/compress"
"forge.lthn.ai/Snider/Borg/pkg/datanode"
"forge.lthn.ai/Snider/Borg/pkg/tarfs"
"github.com/Snider/Borg/pkg/compress"
"github.com/Snider/Borg/pkg/datanode"
"github.com/Snider/Borg/pkg/tarfs"
"github.com/spf13/cobra"
)

View file

@ -1,209 +0,0 @@
# Borg Production Backup Upgrade — Design Document
**Date:** 2026-02-21
**Status:** Implemented
**Approach:** Bottom-Up Refactor
## Problem Statement
Borg's `collect local` command fails on large directories because DataNode loads
everything into RAM. The UI spinner floods non-TTY output. Broken symlinks crash
the collection pipeline. Key derivation uses bare SHA-256. These issues prevent
Borg from being used for production backup workflows.
## Goals
1. Make `collect local` work reliably on large directories (10GB+)
2. Handle symlinks properly (skip broken, follow/store valid)
3. Add quiet/scripted mode for cron and pipeline use
4. Harden encryption key derivation (Argon2id)
5. Clean up the library for external consumers
## Non-Goals
- Full core/go-* package integration (deferred — circular dependency risk since
core imports Borg)
- New CLI commands beyond fixing existing ones
- Network transport or remote sync features
- GUI or web interface
## Architecture
### Current Flow (Broken for Large Dirs)
```
Walk directory → Load ALL files into DataNode (RAM) → Compress → Encrypt → Write
```
### New Flow (Streaming)
```
Walk directory → tar.Writer stream → compress stream → chunked encrypt → output file
```
DataNode remains THE core abstraction — the I/O sandbox that keeps everything safe
and portable. The streaming path bypasses DataNode for the `collect local` pipeline
only, while DataNode continues to serve all other use cases (programmatic access,
format conversion, inspection).
## Design Sections
### 1. DataNode Refactor
DataNode gains a `ToTarWriter(w io.Writer)` method for streaming out its contents
without buffering the entire archive. This is the bridge between DataNode's sandbox
model and streaming I/O.
New symlink handling:
| Symlink State | Behaviour |
|---------------|-----------|
| Valid, points inside DataNode root | Store as symlink entry |
| Valid, points outside DataNode root | Follow and store target content |
| Broken (dangling) | Skip with warning (configurable via `SkipBrokenSymlinks`) |
The `AddPath` method gets an options struct:
```go
type AddPathOptions struct {
SkipBrokenSymlinks bool // default: true
FollowSymlinks bool // default: false (store as symlinks)
ExcludePatterns []string
}
```
### 2. UI & Logger Cleanup
Replace direct spinner writes with a `Progress` interface:
```go
type Progress interface {
Start(label string)
Update(current, total int64)
Finish(label string)
Log(level, msg string, args ...any)
}
```
Two implementations:
- **InteractiveProgress** — spinner + progress bar (when `isatty(stdout)`)
- **QuietProgress** — structured log lines only (cron, pipes, `--quiet` flag)
TTY detection at startup selects the implementation. All existing `ui.Spinner` and
`fmt.Printf` calls in library code get replaced with `Progress` method calls.
New `--quiet` / `-q` flag on all commands suppresses non-error output.
### 3. TIM Streaming Encryption
ChaCha20-Poly1305 is AEAD — it needs the full plaintext to compute the auth tag.
For streaming, we use a chunked block format:
```
[magic: 4 bytes "STIM"]
[version: 1 byte]
[salt: 16 bytes] ← Argon2id salt
[argon2 params: 12 bytes] ← time, memory, threads (uint32 LE each)
Per block (repeated):
[nonce: 12 bytes]
[length: 4 bytes LE] ← ciphertext length including 16-byte Poly1305 tag
[ciphertext: N bytes] ← encrypted chunk + tag
Final block:
[nonce: 12 bytes]
[length: 4 bytes LE = 0] ← zero length signals EOF
```
Block size: 1 MiB plaintext → ~1 MiB + 16 bytes ciphertext per block.
The `Sigil` (Enchantrix crypto handle) wraps this as `StreamEncrypt(r io.Reader,
w io.Writer)` and `StreamDecrypt(r io.Reader, w io.Writer)`.
### 4. Key Derivation Hardening
Replace bare `SHA-256(password)` with Argon2id:
```go
key := argon2.IDKey(password, salt, time=3, memory=64*1024, threads=4, keyLen=32)
```
Parameters stored in the STIM header (section 3 above) so they can be tuned
without breaking existing archives. Random 16-byte salt generated per archive.
Backward compatibility: detect old format by checking for "STIM" magic. Old files
(no magic header) use legacy SHA-256 derivation with a deprecation warning.
### 5. Collect Local Streaming Pipeline
The new `collect local` pipeline for large directories:
```
filepath.WalkDir
→ tar.NewWriter (streaming)
→ xz/gzip compressor (streaming)
→ chunked AEAD encryptor (streaming)
→ os.File output
```
Memory usage: ~2 MiB regardless of input size (1 MiB compress buffer + 1 MiB
encrypt block).
Error handling:
- Broken symlinks: skip with warning (not fatal)
- Permission denied: skip with warning, continue
- Disk full on output: fatal, clean up partial file
- Read errors mid-stream: fatal, clean up partial file
Compression selection: `--compress=xz` (default, best ratio) or `--compress=gzip`
(faster). Matches existing Borg compression support.
### 6. Core Package Integration (Deferred)
Core imports Borg, so Borg cannot import core packages without creating a circular
dependency. Integration points are marked with TODOs for when the dependency
direction is resolved (likely by extracting shared interfaces to a common module):
- `core/go` config system → Borg config loading
- `core/go` logging → Borg Progress interface backend
- `core/go-store` → DataNode persistence
- `core/go` io.Medium → DataNode filesystem abstraction
## File Impact Summary
| Area | Files | Change Type |
|------|-------|-------------|
| DataNode | `pkg/datanode/*.go` | Modify (ToTarWriter, symlinks, AddPathOptions) |
| UI | `pkg/ui/*.go` | Rewrite (Progress interface, TTY detection) |
| TIM/STIM | `pkg/tim/*.go` | Modify (streaming encrypt/decrypt, new header) |
| Crypto | `pkg/tim/crypto.go` (new) | Create (Argon2id, chunked AEAD) |
| Collect | `cmd/collect_local.go` | Rewrite (streaming pipeline) |
| CLI | `cmd/root.go`, `cmd/*.go` | Modify (--quiet flag) |
## Testing Strategy
- Unit tests for each component (DataNode, Progress, chunked AEAD, Argon2id)
- Round-trip tests: encrypt → decrypt → compare original
- Large file test: 100 MiB synthetic directory through full pipeline
- Symlink matrix: valid internal, valid external, broken, nested
- Backward compatibility: decrypt old-format STIM with new code
- Race detector: `go test -race ./...`
## Dependencies
New:
- `golang.org/x/crypto/argon2` (Argon2id key derivation)
- `golang.org/x/term` (TTY detection via `term.IsTerminal`)
Existing (unchanged):
- `github.com/Snider/Enchantrix` (ChaCha20-Poly1305 via Sigil)
- `github.com/ulikunitz/xz` (XZ compression)
## Risk Assessment
| Risk | Mitigation |
|------|------------|
| Breaking existing STIM format | Magic-byte detection for backward compat |
| Chunked AEAD security | Standard construction (each block independent nonce) |
| Circular dep with core | Deferred; TODO markers only |
| Large directory edge cases | Extensive symlink + permission test matrix |

File diff suppressed because it is too large Load diff

View file

@ -6,8 +6,8 @@ import (
"log"
"os"
"forge.lthn.ai/Snider/Borg/pkg/github"
"forge.lthn.ai/Snider/Borg/pkg/vcs"
"github.com/Snider/Borg/pkg/github"
"github.com/Snider/Borg/pkg/vcs"
)
func main() {

View file

@ -4,13 +4,13 @@ import (
"log"
"os"
"forge.lthn.ai/Snider/Borg/pkg/github"
"github.com/Snider/Borg/pkg/github"
)
func main() {
log.Println("Collecting GitHub release...")
owner, repo, err := github.ParseRepoFromURL("https://forge.lthn.ai/Snider/Borg")
owner, repo, err := github.ParseRepoFromURL("https://github.com/Snider/Borg")
if err != nil {
log.Fatalf("Failed to parse repo from URL: %v", err)
}

View file

@ -4,14 +4,14 @@ import (
"log"
"os"
"forge.lthn.ai/Snider/Borg/pkg/vcs"
"github.com/Snider/Borg/pkg/vcs"
)
func main() {
log.Println("Collecting GitHub repo...")
cloner := vcs.NewGitCloner()
dn, err := cloner.CloneGitRepository("https://forge.lthn.ai/Snider/Borg", nil)
dn, err := cloner.CloneGitRepository("https://github.com/Snider/Borg", nil)
if err != nil {
log.Fatalf("Failed to clone repository: %v", err)
}

View file

@ -4,7 +4,7 @@ import (
"log"
"os"
"forge.lthn.ai/Snider/Borg/pkg/pwa"
"github.com/Snider/Borg/pkg/pwa"
)
func main() {

View file

@ -4,7 +4,7 @@ import (
"log"
"os"
"forge.lthn.ai/Snider/Borg/pkg/website"
"github.com/Snider/Borg/pkg/website"
)
func main() {

View file

@ -4,8 +4,8 @@ import (
"log"
"os"
"forge.lthn.ai/Snider/Borg/pkg/datanode"
"forge.lthn.ai/Snider/Borg/pkg/tim"
"github.com/Snider/Borg/pkg/datanode"
"github.com/Snider/Borg/pkg/tim"
)
func main() {

BIN
examples/demo-sample.smsg Normal file

Binary file not shown.

View file

@ -17,7 +17,7 @@ import (
"strconv"
"strings"
"forge.lthn.ai/Snider/Borg/pkg/smsg"
"github.com/Snider/Borg/pkg/smsg"
)
// trackList allows multiple -track flags

View file

@ -5,8 +5,8 @@ import (
"io/fs"
"os"
"forge.lthn.ai/Snider/Borg/pkg/compress"
"forge.lthn.ai/Snider/Borg/pkg/datanode"
"github.com/Snider/Borg/pkg/compress"
"github.com/Snider/Borg/pkg/datanode"
)
func main() {

View file

@ -3,7 +3,7 @@ package main
import (
"log"
"forge.lthn.ai/Snider/Borg/pkg/tim"
"github.com/Snider/Borg/pkg/tim"
)
func main() {

View file

@ -5,8 +5,8 @@ import (
"net/http"
"os"
"forge.lthn.ai/Snider/Borg/pkg/compress"
"forge.lthn.ai/Snider/Borg/pkg/tarfs"
"github.com/Snider/Borg/pkg/compress"
"github.com/Snider/Borg/pkg/tarfs"
)
func main() {

View file

@ -19,8 +19,8 @@ import (
"path/filepath"
"time"
"forge.lthn.ai/Snider/Borg/pkg/smsg"
"forge.lthn.ai/Snider/Borg/pkg/stmf"
"github.com/Snider/Borg/pkg/smsg"
"github.com/Snider/Borg/pkg/stmf"
)
func main() {

58
go.mod
View file

@ -1,74 +1,68 @@
module forge.lthn.ai/Snider/Borg
module github.com/Snider/Borg
go 1.25.0
require (
forge.lthn.ai/Snider/Enchantrix v0.0.4
github.com/Snider/Enchantrix v0.0.2
github.com/fatih/color v1.18.0
github.com/go-git/go-git/v5 v5.16.4
github.com/go-git/go-git/v5 v5.16.3
github.com/google/go-github/v39 v39.2.0
github.com/klauspost/compress v1.18.4
github.com/klauspost/compress v1.18.2
github.com/mattn/go-isatty v0.0.20
github.com/schollz/progressbar/v3 v3.18.0
github.com/spf13/cobra v1.10.2
github.com/spf13/cobra v1.10.1
github.com/ulikunitz/xz v0.5.15
github.com/wailsapp/wails/v2 v2.11.0
golang.org/x/crypto v0.48.0
golang.org/x/mod v0.33.0
golang.org/x/net v0.50.0
golang.org/x/oauth2 v0.35.0
golang.org/x/mod v0.30.0
golang.org/x/net v0.47.0
golang.org/x/oauth2 v0.33.0
)
require (
dario.cat/mergo v1.0.2 // indirect
dario.cat/mergo v1.0.0 // indirect
github.com/Microsoft/go-winio v0.6.2 // indirect
github.com/ProtonMail/go-crypto v1.3.0 // indirect
github.com/bep/debounce v1.2.1 // indirect
github.com/clipperhouse/uax29/v2 v2.4.0 // indirect
github.com/cloudflare/circl v1.6.3 // indirect
github.com/cyphar/filepath-securejoin v0.6.1 // indirect
github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc // indirect
github.com/cloudflare/circl v1.6.1 // indirect
github.com/cyphar/filepath-securejoin v0.4.1 // indirect
github.com/emirpasic/gods v1.18.1 // indirect
github.com/go-git/gcfg v1.5.1-0.20230307220236-3a3c6141e376 // indirect
github.com/go-git/go-billy/v5 v5.7.0 // indirect
github.com/go-git/go-billy/v5 v5.6.2 // indirect
github.com/go-ole/go-ole v1.3.0 // indirect
github.com/godbus/dbus/v5 v5.2.2 // indirect
github.com/godbus/dbus/v5 v5.1.0 // indirect
github.com/golang/groupcache v0.0.0-20241129210726-2c02b8208cf8 // indirect
github.com/google/go-querystring v1.1.0 // indirect
github.com/google/uuid v1.6.0 // indirect
github.com/gorilla/websocket v1.5.3 // indirect
github.com/inconshreveable/mousetrap v1.1.0 // indirect
github.com/jbenet/go-context v0.0.0-20150711004518-d14ea06fba99 // indirect
github.com/jchv/go-winloader v0.0.0-20250406163304-c1995be93bd1 // indirect
github.com/kevinburke/ssh_config v1.4.0 // indirect
github.com/klauspost/cpuid/v2 v2.3.0 // indirect
github.com/jchv/go-winloader v0.0.0-20210711035445-715c2860da7e // indirect
github.com/kevinburke/ssh_config v1.2.0 // indirect
github.com/labstack/echo/v4 v4.13.3 // indirect
github.com/labstack/gommon v0.4.2 // indirect
github.com/leaanthony/go-ansi-parser v1.6.1 // indirect
github.com/leaanthony/gosod v1.0.4 // indirect
github.com/leaanthony/slicer v1.6.0 // indirect
github.com/leaanthony/u v1.1.1 // indirect
github.com/mattn/go-colorable v0.1.14 // indirect
github.com/mattn/go-runewidth v0.0.19 // indirect
github.com/mattn/go-colorable v0.1.13 // indirect
github.com/mitchellh/colorstring v0.0.0-20190213212951-d06e56a500db // indirect
github.com/pjbgf/sha1cd v0.5.0 // indirect
github.com/pjbgf/sha1cd v0.3.2 // indirect
github.com/pkg/browser v0.0.0-20240102092130-5ac0b6a4141c // indirect
github.com/pkg/errors v0.9.1 // indirect
github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 // indirect
github.com/rivo/uniseg v0.4.7 // indirect
github.com/samber/lo v1.52.0 // indirect
github.com/sergi/go-diff v1.4.0 // indirect
github.com/skeema/knownhosts v1.3.2 // indirect
github.com/spf13/pflag v1.0.10 // indirect
github.com/samber/lo v1.49.1 // indirect
github.com/sergi/go-diff v1.3.2-0.20230802210424-5b0b94c5c0d3 // indirect
github.com/skeema/knownhosts v1.3.1 // indirect
github.com/spf13/pflag v1.0.9 // indirect
github.com/tkrajina/go-reflector v0.5.8 // indirect
github.com/valyala/bytebufferpool v1.0.0 // indirect
github.com/valyala/fasttemplate v1.2.2 // indirect
github.com/wailsapp/go-webview2 v1.0.23 // indirect
github.com/wailsapp/go-webview2 v1.0.22 // indirect
github.com/wailsapp/mimetype v1.4.1 // indirect
github.com/xanzy/ssh-agent v0.3.3 // indirect
golang.org/x/exp v0.0.0-20260212183809-81e46e3db34a // indirect
golang.org/x/sys v0.41.0 // indirect
golang.org/x/term v0.40.0 // indirect
golang.org/x/text v0.34.0 // indirect
golang.org/x/crypto v0.44.0 // indirect
golang.org/x/sys v0.38.0 // indirect
golang.org/x/term v0.37.0 // indirect
golang.org/x/text v0.31.0 // indirect
gopkg.in/warnings.v0 v0.1.2 // indirect
)

93
go.sum
View file

@ -1,10 +1,12 @@
dario.cat/mergo v1.0.2 h1:85+piFYR1tMbRrLcDwR18y4UKJ3aH1Tbzi24VRW1TK8=
forge.lthn.ai/Snider/Enchantrix v0.0.4 h1:biwpix/bdedfyc0iVeK15awhhJKH6TEMYOTXzHXx5TI=
dario.cat/mergo v1.0.0 h1:AGCNq9Evsj31mOgNPcLyXc+4PNABt905YmuqPYYpBWk=
dario.cat/mergo v1.0.0/go.mod h1:uNxQE+84aUszobStD9th8a29P2fMDhsBdgRYvZOxGmk=
github.com/Microsoft/go-winio v0.5.2/go.mod h1:WpS1mjBmmwHBEWmogvA2mj8546UReBk4v8QkMxJ6pZY=
github.com/Microsoft/go-winio v0.6.2 h1:F2VQgta7ecxGYO8k3ZZz3RS8fVIXVxONVUPlNERoyfY=
github.com/Microsoft/go-winio v0.6.2/go.mod h1:yd8OoFMLzJbo9gZq8j5qaps8bJ9aShtEA8Ipt1oGCvU=
github.com/ProtonMail/go-crypto v1.3.0 h1:ILq8+Sf5If5DCpHQp4PbZdS1J7HDFRXz/+xKBiRGFrw=
github.com/ProtonMail/go-crypto v1.3.0/go.mod h1:9whxjD8Rbs29b4XWbB8irEcE8KHMqaR2e7GWU1R+/PE=
github.com/Snider/Enchantrix v0.0.2 h1:ExZQiBhfS/p/AHFTKhY80TOd+BXZjK95EzByAEgwvjs=
github.com/Snider/Enchantrix v0.0.2/go.mod h1:CtFcLAvnDT1KcuF1JBb/DJj0KplY8jHryO06KzQ1hsQ=
github.com/anmitsu/go-shlex v0.0.0-20200514113438-38f4b401e2be h1:9AeTilPcZAjCFIImctFaOjnTIavg87rW78vTPkQqLI8=
github.com/anmitsu/go-shlex v0.0.0-20200514113438-38f4b401e2be/go.mod h1:ySMOLuWl6zY27l47sB3qLNK6tF2fkHG55UZxx8oIVo4=
github.com/armon/go-socks5 v0.0.0-20160902184237-e75332964ef5 h1:0CwZNZbxp69SHPdPJAN/hZIm0C4OItdklCFmMRWYpio=
@ -13,14 +15,14 @@ github.com/bep/debounce v1.2.1 h1:v67fRdBA9UQu2NhLFXrSg0Brw7CexQekrBwDMM8bzeY=
github.com/bep/debounce v1.2.1/go.mod h1:H8yggRPQKLUhUoqrJC1bO2xNya7vanpDl7xR3ISbCJ0=
github.com/chengxilo/virtualterm v1.0.4 h1:Z6IpERbRVlfB8WkOmtbHiDbBANU7cimRIof7mk9/PwM=
github.com/chengxilo/virtualterm v1.0.4/go.mod h1:DyxxBZz/x1iqJjFxTFcr6/x+jSpqN0iwWCOK1q10rlY=
github.com/clipperhouse/stringish v0.1.1 h1:+NSqMOr3GR6k1FdRhhnXrLfztGzuG+VuFDfatpWHKCs=
github.com/clipperhouse/uax29/v2 v2.4.0 h1:RXqE/l5EiAbA4u97giimKNlmpvkmz+GrBVTelsoXy9g=
github.com/cloudflare/circl v1.6.3 h1:9GPOhQGF9MCYUeXyMYlqTR6a5gTrgR/fBLXvUgtVcg8=
github.com/cloudflare/circl v1.6.1 h1:zqIqSPIndyBh1bjLVVDHMPpVKqp8Su/V+6MeDzzQBQ0=
github.com/cloudflare/circl v1.6.1/go.mod h1:uddAzsPgqdMAYatqJ0lsjX1oECcQLIlRpzZh3pJrofs=
github.com/cpuguy83/go-md2man/v2 v2.0.6/go.mod h1:oOW0eioCTA6cOiMLiUPZOpcVxMig6NIQQ7OS05n1F4g=
github.com/cyphar/filepath-securejoin v0.6.1 h1:5CeZ1jPXEiYt3+Z6zqprSAgSWiggmpVyciv8syjIpVE=
github.com/cyphar/filepath-securejoin v0.4.1 h1:JyxxyPEaktOD+GAnqIqTf9A8tHyAG22rowi7HkoSU1s=
github.com/cyphar/filepath-securejoin v0.4.1/go.mod h1:Sdj7gXlvMcPZsbhwhQ33GguGLDGQL7h7bg04C/+u9jI=
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc h1:U9qPSI2PIWSS1VwoXQT9A3Wy9MM3WgvqSxFWenqJduM=
github.com/elazarl/goproxy v1.7.2 h1:Y2o6urb7Eule09PjlhQRGNsqRfPmYI3KKQLFpCAV3+o=
github.com/elazarl/goproxy v1.7.2/go.mod h1:82vkLNir0ALaW14Rc399OTTjyNREgmdL2cVoIbS6XaE=
github.com/emirpasic/gods v1.18.1 h1:FXtiHYKDGKCW2KzwZKx0iC0PQmdlorYgdFG9jPXJ1Bc=
@ -31,13 +33,16 @@ github.com/gliderlabs/ssh v0.3.8 h1:a4YXD1V7xMF9g5nTkdfnja3Sxy1PVDCj1Zg4Wb8vY6c=
github.com/gliderlabs/ssh v0.3.8/go.mod h1:xYoytBv1sV0aL3CavoDuJIQNURXkkfPA/wxQ1pL1fAU=
github.com/go-git/gcfg v1.5.1-0.20230307220236-3a3c6141e376 h1:+zs/tPmkDkHx3U66DAb0lQFJrpS6731Oaa12ikc+DiI=
github.com/go-git/gcfg v1.5.1-0.20230307220236-3a3c6141e376/go.mod h1:an3vInlBmSxCcxctByoQdvwPiA7DTK7jaaFDBTtu0ic=
github.com/go-git/go-billy/v5 v5.7.0 h1:83lBUJhGWhYp0ngzCMSgllhUSuoHP1iEWYjsPl9nwqM=
github.com/go-git/go-billy/v5 v5.6.2 h1:6Q86EsPXMa7c3YZ3aLAQsMA0VlWmy43r6FHqa/UNbRM=
github.com/go-git/go-billy/v5 v5.6.2/go.mod h1:rcFC2rAsp/erv7CMz9GczHcuD0D32fWzH+MJAU+jaUU=
github.com/go-git/go-git-fixtures/v4 v4.3.2-0.20231010084843-55a94097c399 h1:eMje31YglSBqCdIqdhKBW8lokaMrL3uTkpGYlE2OOT4=
github.com/go-git/go-git-fixtures/v4 v4.3.2-0.20231010084843-55a94097c399/go.mod h1:1OCfN199q1Jm3HZlxleg+Dw/mwps2Wbk9frAWm+4FII=
github.com/go-git/go-git/v5 v5.16.4 h1:7ajIEZHZJULcyJebDLo99bGgS0jRrOxzZG4uCk2Yb2Y=
github.com/go-git/go-git/v5 v5.16.3 h1:Z8BtvxZ09bYm/yYNgPKCzgWtaRqDTgIKRgIRHBfU6Z8=
github.com/go-git/go-git/v5 v5.16.3/go.mod h1:4Ge4alE/5gPs30F2H1esi2gPd69R0C39lolkucHBOp8=
github.com/go-ole/go-ole v1.3.0 h1:Dt6ye7+vXGIKZ7Xtk4s6/xVdGDQynvom7xCFEdWr6uE=
github.com/go-ole/go-ole v1.3.0/go.mod h1:5LS6F96DhAwUc7C+1HLexzMXY1xGRSryjyPPKW6zv78=
github.com/godbus/dbus/v5 v5.2.2 h1:TUR3TgtSVDmjiXOgAAyaZbYmIeP3DPkld3jgKGV8mXQ=
github.com/godbus/dbus/v5 v5.1.0 h1:4KLkAxT3aOY8Li4FRJe/KvhoNFFxo0m6fNuFUO8QJUk=
github.com/godbus/dbus/v5 v5.1.0/go.mod h1:xhWf0FNVPg57R7Z0UbKHbJfkEywrmjJnf7w5xrFpKfA=
github.com/golang/groupcache v0.0.0-20241129210726-2c02b8208cf8 h1:f+oWsMOmNPc8JmEHVZIycC7hBoQxHH9pNKQORJNozsQ=
github.com/golang/groupcache v0.0.0-20241129210726-2c02b8208cf8/go.mod h1:wcDNUvekVysuuOpQKo3191zZyTpiI6se1N1ULghS0sw=
github.com/golang/protobuf v1.3.1/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U=
@ -58,10 +63,12 @@ github.com/inconshreveable/mousetrap v1.1.0 h1:wN+x4NVGpMsO7ErUn/mUI3vEoE6Jt13X2
github.com/inconshreveable/mousetrap v1.1.0/go.mod h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw=
github.com/jbenet/go-context v0.0.0-20150711004518-d14ea06fba99 h1:BQSFePA1RWJOlocH6Fxy8MmwDt+yVQYULKfN0RoTN8A=
github.com/jbenet/go-context v0.0.0-20150711004518-d14ea06fba99/go.mod h1:1lJo3i6rXxKeerYnT8Nvf0QmHCRC1n8sfWVwXF2Frvo=
github.com/jchv/go-winloader v0.0.0-20250406163304-c1995be93bd1 h1:njuLRcjAuMKr7kI3D85AXWkw6/+v9PwtV6M6o11sWHQ=
github.com/kevinburke/ssh_config v1.4.0 h1:6xxtP5bZ2E4NF5tuQulISpTO2z8XbtH8cg1PWkxoFkQ=
github.com/klauspost/compress v1.18.4 h1:RPhnKRAQ4Fh8zU2FY/6ZFDwTVTxgJ/EMydqSTzE9a2c=
github.com/klauspost/cpuid/v2 v2.3.0 h1:S4CRMLnYUhGeDFDqkGriYKdfoFlDnMtqTiI/sFzhA9Y=
github.com/jchv/go-winloader v0.0.0-20210711035445-715c2860da7e h1:Q3+PugElBCf4PFpxhErSzU3/PY5sFL5Z6rfv4AbGAck=
github.com/jchv/go-winloader v0.0.0-20210711035445-715c2860da7e/go.mod h1:alcuEEnZsY1WQsagKhZDsoPCRoOijYqhZvPwLG0kzVs=
github.com/kevinburke/ssh_config v1.2.0 h1:x584FjTGwHzMwvHx18PXxbBVzfnxogHaAReU4gf13a4=
github.com/kevinburke/ssh_config v1.2.0/go.mod h1:CT57kijsi8u/K/BOFA39wgDQJ9CxiF4nAY/ojJ6r6mM=
github.com/klauspost/compress v1.18.2 h1:iiPHWW0YrcFgpBYhsA6D1+fqHssJscY/Tm/y2Uqnapk=
github.com/klauspost/compress v1.18.2/go.mod h1:R0h/fSBs8DE4ENlcrlib3PsXS61voFxhIs2DeRhCvJ4=
github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo=
github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE=
github.com/kr/pretty v0.3.1/go.mod h1:hoEshYVHaxMs3cyo3Yncou5ZscifuDolrwPKZanG3xk=
@ -86,36 +93,44 @@ github.com/leaanthony/u v1.1.1/go.mod h1:9+o6hejoRljvZ3BzdYlVL0JYCwtnAsVuN9pVTQc
github.com/matryer/is v1.4.0/go.mod h1:8I/i5uYgLzgsgEloJE1U6xx5HkBQpAZvepWuujKwMRU=
github.com/matryer/is v1.4.1 h1:55ehd8zaGABKLXQUe2awZ99BD/PTc2ls+KV/dXphgEQ=
github.com/matryer/is v1.4.1/go.mod h1:8I/i5uYgLzgsgEloJE1U6xx5HkBQpAZvepWuujKwMRU=
github.com/mattn/go-colorable v0.1.14 h1:9A9LHSqF/7dyVVX6g0U9cwm9pG3kP9gSzcuIPHPsaIE=
github.com/mattn/go-colorable v0.1.13 h1:fFA4WZxdEF4tXPZVKMLwD8oUnCTTo08duU7wxecdEvA=
github.com/mattn/go-colorable v0.1.13/go.mod h1:7S9/ev0klgBDR4GtXTXX8a3vIGJpMovkB8vQcUbaXHg=
github.com/mattn/go-isatty v0.0.16/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/yFXSvRLM=
github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY=
github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y=
github.com/mattn/go-runewidth v0.0.19 h1:v++JhqYnZuu5jSKrk9RbgF5v4CGUjqRfBm05byFGLdw=
github.com/mattn/go-runewidth v0.0.16 h1:E5ScNMtiwvlvB5paMFdw9p4kSQzbXFikJ5SQO6TULQc=
github.com/mattn/go-runewidth v0.0.16/go.mod h1:Jdepj2loyihRzMpdS35Xk/zdY8IAYHsh153qUoGf23w=
github.com/mitchellh/colorstring v0.0.0-20190213212951-d06e56a500db h1:62I3jR2EmQ4l5rM/4FEfDWcRD+abF5XlKShorW5LRoQ=
github.com/mitchellh/colorstring v0.0.0-20190213212951-d06e56a500db/go.mod h1:l0dey0ia/Uv7NcFFVbCLtqEBQbrT4OCwCSKTEv6enCw=
github.com/onsi/gomega v1.34.1 h1:EUMJIKUjM8sKjYbtxQI9A4z2o+rruxnzNvpknOXie6k=
github.com/onsi/gomega v1.34.1/go.mod h1:kU1QgUvBDLXBJq618Xvm2LUX6rSAfRaFRTcdOeDLwwY=
github.com/pjbgf/sha1cd v0.5.0 h1:a+UkboSi1znleCDUNT3M5YxjOnN1fz2FhN48FlwCxs0=
github.com/pjbgf/sha1cd v0.3.2 h1:a9wb0bp1oC2TGwStyn0Umc/IGKQnEgF0vVaZ8QF8eo4=
github.com/pjbgf/sha1cd v0.3.2/go.mod h1:zQWigSxVmsHEZow5qaLtPYxpcKMMQpa09ixqBxuCS6A=
github.com/pkg/browser v0.0.0-20240102092130-5ac0b6a4141c h1:+mdjkGKdHQG3305AYmdv1U2eRNDiU2ErMBj1gwrq8eQ=
github.com/pkg/browser v0.0.0-20240102092130-5ac0b6a4141c/go.mod h1:7rwL4CYBLnjLxUqIJNnCWiEdr3bn6IUYi15bNlnbCCU=
github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4=
github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 h1:Jamvg5psRIccs7FGNTlIRMkT8wgtp5eCXdBlqhYGL6U=
github.com/rivo/uniseg v0.2.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc=
github.com/rivo/uniseg v0.4.7 h1:WUdvkW8uEhrYfLC4ZzdpI2ztxP1I582+49Oc5Mq64VQ=
github.com/rivo/uniseg v0.4.7/go.mod h1:FN3SvrM+Zdj16jyLfmOkMNblXMcoc8DfTHruCPUcx88=
github.com/rogpeppe/go-internal v1.14.1 h1:UQB4HGPB6osV0SQTLymcB4TgvyWu6ZyliaW0tI/otEQ=
github.com/rogpeppe/go-internal v1.14.1/go.mod h1:MaRKkUm5W0goXpeCfT7UZI6fk/L7L7so1lCWt35ZSgc=
github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
github.com/samber/lo v1.52.0 h1:Rvi+3BFHES3A8meP33VPAxiBZX/Aws5RxrschYGjomw=
github.com/samber/lo v1.49.1 h1:4BIFyVfuQSEpluc7Fua+j1NolZHiEHEpaSEKdsH0tew=
github.com/samber/lo v1.49.1/go.mod h1:dO6KHFzUKXgP8LDhU0oI8d2hekjXnGOu0DB8Jecxd6o=
github.com/schollz/progressbar/v3 v3.18.0 h1:uXdoHABRFmNIjUfte/Ex7WtuyVslrw2wVPQmCN62HpA=
github.com/schollz/progressbar/v3 v3.18.0/go.mod h1:IsO3lpbaGuzh8zIMzgY3+J8l4C8GjO0Y9S69eFvNsec=
github.com/sergi/go-diff v1.4.0 h1:n/SP9D5ad1fORl+llWyN+D6qoUETXNZARKjyY2/KVCw=
github.com/sergi/go-diff v1.3.2-0.20230802210424-5b0b94c5c0d3 h1:n661drycOFuPLCN3Uc8sB6B/s6Z4t2xvBgU1htSHuq8=
github.com/sergi/go-diff v1.3.2-0.20230802210424-5b0b94c5c0d3/go.mod h1:A0bzQcvG0E7Rwjx0REVgAGH58e96+X0MeOfepqsbeW4=
github.com/sirupsen/logrus v1.7.0/go.mod h1:yWOB1SBYBC5VeMP7gHvWumXLIWorT60ONWic61uBYv0=
github.com/skeema/knownhosts v1.3.2 h1:EDL9mgf4NzwMXCTfaxSD/o/a5fxDw/xL9nkU28JjdBg=
github.com/spf13/cobra v1.10.2 h1:DMTTonx5m65Ic0GOoRY2c16WCbHxOOw6xxezuLaBpcU=
github.com/skeema/knownhosts v1.3.1 h1:X2osQ+RAjK76shCbvhHHHVl3ZlgDm8apHEHFqRjnBY8=
github.com/skeema/knownhosts v1.3.1/go.mod h1:r7KTdC8l4uxWRyK2TpQZ/1o5HaSzh06ePQNxPwTcfiY=
github.com/spf13/cobra v1.10.1 h1:lJeBwCfmrnXthfAupyUTzJ/J4Nc1RsHC/mSRU2dll/s=
github.com/spf13/cobra v1.10.1/go.mod h1:7SmJGaTHFVBY0jW4NXGluQoLvhqFQM+6XSKD+P4XaB0=
github.com/spf13/pflag v1.0.9 h1:9exaQaMOCwffKiiiYk6/BndUBv+iRViNW+4lEMi0PvY=
github.com/spf13/pflag v1.0.9/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg=
github.com/spf13/pflag v1.0.10 h1:4EBh2KAYBwaONj6b2Ye1GiHfwjqyROoF4RwYO+vPwFk=
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs=
github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4=
@ -129,7 +144,8 @@ github.com/valyala/bytebufferpool v1.0.0 h1:GqA5TC/0021Y/b9FG4Oi9Mr3q7XYx6Kllzaw
github.com/valyala/bytebufferpool v1.0.0/go.mod h1:6bBcMArwyJ5K/AmCkWv1jt77kVWyCJ6HpOuEn7z0Csc=
github.com/valyala/fasttemplate v1.2.2 h1:lxLXG0uE3Qnshl9QyaK6XJxMXlQZELvChBOCmQD0Loo=
github.com/valyala/fasttemplate v1.2.2/go.mod h1:KHLXt3tVN2HBp8eijSv/kGJopbvo7S+qRAEEKiv+SiQ=
github.com/wailsapp/go-webview2 v1.0.23 h1:jmv8qhz1lHibCc79bMM/a/FqOnnzOGEisLav+a0b9P0=
github.com/wailsapp/go-webview2 v1.0.22 h1:YT61F5lj+GGaat5OB96Aa3b4QA+mybD0Ggq6NZijQ58=
github.com/wailsapp/go-webview2 v1.0.22/go.mod h1:qJmWAmAmaniuKGZPWwne+uor3AHMB5PFhqiK0Bbj8kc=
github.com/wailsapp/mimetype v1.4.1 h1:pQN9ycO7uo4vsUUuPeHEYoUkLVkaRntMnHJxVwYhwHs=
github.com/wailsapp/mimetype v1.4.1/go.mod h1:9aV5k31bBOv5z6u+QP8TltzvNGJPmNJD4XlAL3U+j3o=
github.com/wailsapp/wails/v2 v2.11.0 h1:seLacV8pqupq32IjS4Y7V8ucab0WZwtK6VvUVxSBtqQ=
@ -139,17 +155,21 @@ github.com/xanzy/ssh-agent v0.3.3/go.mod h1:6dzNDKs0J9rVPHPhaGCukekBHKqfl+L3KghI
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
golang.org/x/crypto v0.0.0-20210817164053-32db794688a5/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
golang.org/x/crypto v0.0.0-20220622213112-05595931fe9d/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4=
golang.org/x/crypto v0.48.0 h1:/VRzVqiRSggnhY7gNRxPauEQ5Drw9haKdM0jqfcCFts=
golang.org/x/crypto v0.48.0/go.mod h1:r0kV5h3qnFPlQnBSrULhlsRfryS2pmewsg+XfMgkVos=
golang.org/x/exp v0.0.0-20260212183809-81e46e3db34a h1:ovFr6Z0MNmU7nH8VaX5xqw+05ST2uO1exVfZPVqRC5o=
golang.org/x/mod v0.33.0 h1:tHFzIWbBifEmbwtGz65eaWyGiGZatSrT9prnU8DbVL8=
golang.org/x/crypto v0.44.0 h1:A97SsFvM3AIwEEmTBiaxPPTYpDC47w720rdiiUvgoAU=
golang.org/x/crypto v0.44.0/go.mod h1:013i+Nw79BMiQiMsOPcVCB5ZIJbYkerPrGnOa00tvmc=
golang.org/x/exp v0.0.0-20240719175910-8a7402abbf56 h1:2dVuKD2vS7b0QIHQbpyTISPd0LeHDbnYEryqj5Q1ug8=
golang.org/x/exp v0.0.0-20240719175910-8a7402abbf56/go.mod h1:M4RDyNAINzryxdtnbRXRL/OHtkFuWGRjvuhBJpk2IlY=
golang.org/x/mod v0.30.0 h1:fDEXFVZ/fmCKProc/yAXXUijritrDzahmwwefnjoPFk=
golang.org/x/mod v0.30.0/go.mod h1:lAsf5O2EvJeSFMiBxXDki7sCgAxEUcZHXoXMKT4GJKc=
golang.org/x/net v0.0.0-20190603091049-60506f45cf65/go.mod h1:HSz+uSET+XFnRR8LxR5pz3Of3rY3CfYBVs4xY44aLks=
golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg=
golang.org/x/net v0.0.0-20210505024714-0287a6fb4125/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
golang.org/x/net v0.0.0-20211112202133-69e39bad7dc2/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
golang.org/x/net v0.50.0 h1:ucWh9eiCGyDR3vtzso0WMQinm2Dnt8cFMuQa9K33J60=
golang.org/x/net v0.47.0 h1:Mx+4dIFzqraBXUugkia1OOvlD6LemFo1ALMHjrXDOhY=
golang.org/x/net v0.47.0/go.mod h1:/jNxtkgq5yWUGYkaZGqo27cfGZ1c5Nen03aYrrKpVRU=
golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
golang.org/x/oauth2 v0.35.0 h1:Mv2mzuHuZuY2+bkyWXIHMfhNdJAdwW3FuWeCPYN5GVQ=
golang.org/x/oauth2 v0.33.0 h1:4Q+qn+E5z8gPRJfmRy7C2gGG3T4jIprK6aSYgTXGRpo=
golang.org/x/oauth2 v0.33.0/go.mod h1:lzm5WQJQwKZ3nwavOZ3IS5Aulzxi68dUSgRHujetwEA=
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20191026070338-33540a1f6037/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200810151505-1b9f1253b3ed/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
@ -158,19 +178,20 @@ golang.org/x/sys v0.0.0-20210124154548-22da62e12c0c/go.mod h1:h1NjWce9XRLGQEsW7w
golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220715151400-c0bba94af5f8/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.1.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.41.0 h1:Ivj+2Cp/ylzLiEU89QhWblYnOE9zerudt9Ftecq2C6k=
golang.org/x/sys v0.41.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks=
golang.org/x/sys v0.38.0 h1:3yZWxaJjBmCWXqhN1qh02AkOnCQ1poK6oF+a7xWL6Gc=
golang.org/x/sys v0.38.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks=
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
golang.org/x/term v0.40.0 h1:36e4zGLqU4yhjlmxEaagx2KuYbJq3EwY8K943ZsHcvg=
golang.org/x/term v0.40.0/go.mod h1:w2P8uVp06p2iyKKuvXIm7N/y0UCRt3UfJTfZ7oOpglM=
golang.org/x/term v0.37.0 h1:8EGAD0qCmHYZg6J17DvsMy9/wJ7/D/4pV/wfnld5lTU=
golang.org/x/term v0.37.0/go.mod h1:5pB4lxRNYYVZuTLmy8oR2BH8dflOR+IbTYFD8fi3254=
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk=
golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
golang.org/x/text v0.34.0 h1:oL/Qq0Kdaqxa1KbNeMKwQq0reLCCaFtqu2eNuSeNHbk=
golang.org/x/text v0.34.0/go.mod h1:homfLqTYRFyVYemLBFl5GgL/DWEiH5wcsQ5gSh1yziA=
golang.org/x/text v0.31.0 h1:aC8ghyu4JhP8VojJ2lEHBnochRno1sgL6nEi9WGFGMM=
golang.org/x/text v0.31.0/go.mod h1:tKRAlv61yKIjGGHX/4tP1LTbc13YSec1pxVEWXzfoeM=
golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
google.golang.org/appengine v1.6.7/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc=

View file

@ -3,8 +3,8 @@ package main
import (
"os"
"forge.lthn.ai/Snider/Borg/cmd"
"forge.lthn.ai/Snider/Borg/pkg/logger"
"github.com/Snider/Borg/cmd"
"github.com/Snider/Borg/pkg/logger"
)
var osExit = os.Exit

View file

@ -9,7 +9,7 @@ import (
"fmt"
"os"
"forge.lthn.ai/Snider/Borg/pkg/stmf"
"github.com/Snider/Borg/pkg/stmf"
)
type TestVector struct {

View file

@ -3,34 +3,11 @@ package compress
import (
"bytes"
"compress/gzip"
"fmt"
"io"
"github.com/ulikunitz/xz"
)
// nopCloser wraps an io.Writer with a no-op Close method so a plain
// writer can be returned where an io.WriteCloser is required (used by
// NewCompressWriter for the "none"/"" passthrough format).
type nopCloser struct{ io.Writer }

// Close implements io.Closer; it does nothing and always returns nil,
// because the wrapped writer's lifetime is owned by the caller.
func (n *nopCloser) Close() error { return nil }
// NewCompressWriter returns a streaming io.WriteCloser that compresses data
// written to it into the underlying writer w using the specified format.
// Supported formats: "gz" (gzip), "xz", "none" or "" (passthrough).
// Unknown formats return an error.
func NewCompressWriter(w io.Writer, format string) (io.WriteCloser, error) {
switch format {
case "gz":
return gzip.NewWriter(w), nil
case "xz":
return xz.NewWriter(w)
case "none", "":
return &nopCloser{w}, nil
default:
return nil, fmt.Errorf("unsupported compression format: %q", format)
}
}
// Compress compresses data using the specified format.
func Compress(data []byte, format string) ([]byte, error) {
var buf bytes.Buffer

View file

@ -5,108 +5,6 @@ import (
"testing"
)
// TestNewCompressWriter_Gzip_Good round-trips data through the streaming
// gzip writer: the compressed bytes must differ from the input and must
// decompress back to it exactly.
func TestNewCompressWriter_Gzip_Good(t *testing.T) {
	input := []byte("hello, streaming gzip world")

	var out bytes.Buffer
	wc, err := NewCompressWriter(&out, "gz")
	if err != nil {
		t.Fatalf("NewCompressWriter(gz) error: %v", err)
	}
	if _, err = wc.Write(input); err != nil {
		t.Fatalf("Write error: %v", err)
	}
	if err = wc.Close(); err != nil {
		t.Fatalf("Close error: %v", err)
	}

	packed := out.Bytes()
	if bytes.Equal(input, packed) {
		t.Fatal("compressed data should differ from original")
	}

	got, err := Decompress(packed)
	if err != nil {
		t.Fatalf("Decompress error: %v", err)
	}
	if !bytes.Equal(input, got) {
		t.Errorf("round-trip mismatch: got %q, want %q", got, input)
	}
}
// TestNewCompressWriter_Xz_Good round-trips data through the streaming
// xz writer: output must differ from the input and decompress back to it.
func TestNewCompressWriter_Xz_Good(t *testing.T) {
	input := []byte("hello, streaming xz world")

	var out bytes.Buffer
	wc, err := NewCompressWriter(&out, "xz")
	if err != nil {
		t.Fatalf("NewCompressWriter(xz) error: %v", err)
	}
	if _, err = wc.Write(input); err != nil {
		t.Fatalf("Write error: %v", err)
	}
	if err = wc.Close(); err != nil {
		t.Fatalf("Close error: %v", err)
	}

	packed := out.Bytes()
	if bytes.Equal(input, packed) {
		t.Fatal("compressed data should differ from original")
	}

	got, err := Decompress(packed)
	if err != nil {
		t.Fatalf("Decompress error: %v", err)
	}
	if !bytes.Equal(input, got) {
		t.Errorf("round-trip mismatch: got %q, want %q", got, input)
	}
}
// TestNewCompressWriter_None_Good verifies the passthrough modes: both
// "none" and the empty string must write the input bytes unmodified.
func TestNewCompressWriter_None_Good(t *testing.T) {
	input := []byte("hello, passthrough world")

	// "none" must pass bytes through untouched.
	var sink bytes.Buffer
	wc, err := NewCompressWriter(&sink, "none")
	if err != nil {
		t.Fatalf("NewCompressWriter(none) error: %v", err)
	}
	if _, err = wc.Write(input); err != nil {
		t.Fatalf("Write error: %v", err)
	}
	if err = wc.Close(); err != nil {
		t.Fatalf("Close error: %v", err)
	}
	if !bytes.Equal(input, sink.Bytes()) {
		t.Errorf("passthrough mismatch: got %q, want %q", sink.Bytes(), input)
	}

	// The empty string selects the same passthrough behaviour.
	var sink2 bytes.Buffer
	wc2, err := NewCompressWriter(&sink2, "")
	if err != nil {
		t.Fatalf("NewCompressWriter('') error: %v", err)
	}
	if _, err = wc2.Write(input); err != nil {
		t.Fatalf("Write error: %v", err)
	}
	if err = wc2.Close(); err != nil {
		t.Fatalf("Close error: %v", err)
	}
	if !bytes.Equal(input, sink2.Bytes()) {
		t.Errorf("passthrough (empty string) mismatch: got %q, want %q", sink2.Bytes(), input)
	}
}
// TestNewCompressWriter_Bad confirms an unknown format is rejected
// with a non-nil error.
func TestNewCompressWriter_Bad(t *testing.T) {
	var sink bytes.Buffer
	if _, err := NewCompressWriter(&sink, "invalid-format"); err == nil {
		t.Fatal("expected error for unknown compression format, got nil")
	}
}
func TestGzip_Good(t *testing.T) {
originalData := []byte("hello, gzip world")
compressed, err := Compress(originalData, "gz")

View file

@ -8,8 +8,8 @@ import (
"os"
"sync"
"forge.lthn.ai/Snider/Borg/pkg/datanode"
"forge.lthn.ai/Snider/Borg/pkg/tim"
"github.com/Snider/Borg/pkg/datanode"
"github.com/Snider/Borg/pkg/tim"
)
//go:embed unlock.html

View file

@ -1,197 +0,0 @@
package datanode
import (
"os"
"path/filepath"
"runtime"
"testing"
)
// TestAddPath_Good checks that AddPath ingests a directory tree, keying
// files by slash-separated paths relative to the root, and that directory
// entries themselves are not stored.
func TestAddPath_Good(t *testing.T) {
	root := t.TempDir()
	write := func(p, body string) {
		if err := os.WriteFile(p, []byte(body), 0644); err != nil {
			t.Fatal(err)
		}
	}
	write(filepath.Join(root, "hello.txt"), "hello")
	nested := filepath.Join(root, "sub")
	if err := os.Mkdir(nested, 0755); err != nil {
		t.Fatal(err)
	}
	write(filepath.Join(nested, "world.txt"), "world")

	dn := New()
	if err := dn.AddPath(root, AddPathOptions{}); err != nil {
		t.Fatalf("AddPath failed: %v", err)
	}

	// Files must appear under root-relative, forward-slash keys.
	entry, ok := dn.files["hello.txt"]
	if !ok {
		t.Fatal("hello.txt not found in datanode")
	}
	if string(entry.content) != "hello" {
		t.Errorf("expected content 'hello', got %q", entry.content)
	}
	entry, ok = dn.files["sub/world.txt"]
	if !ok {
		t.Fatal("sub/world.txt not found in datanode")
	}
	if string(entry.content) != "world" {
		t.Errorf("expected content 'world', got %q", entry.content)
	}

	// Directories are implicit and must not be stored.
	for _, key := range []string{"sub", "sub/"} {
		if _, present := dn.files[key]; present {
			t.Error("directories should not be stored as explicit entries")
		}
	}
}
// TestAddPath_SkipBrokenSymlinks_Good ensures that with SkipBrokenSymlinks
// set, a dangling symlink is silently skipped while regular files are kept.
func TestAddPath_SkipBrokenSymlinks_Good(t *testing.T) {
	if runtime.GOOS == "windows" {
		t.Skip("symlinks not reliably supported on Windows")
	}
	root := t.TempDir()
	if err := os.WriteFile(filepath.Join(root, "real.txt"), []byte("real"), 0644); err != nil {
		t.Fatal(err)
	}
	// A link whose target does not exist.
	if err := os.Symlink("/nonexistent/target", filepath.Join(root, "broken.txt")); err != nil {
		t.Fatal(err)
	}

	dn := New()
	if err := dn.AddPath(root, AddPathOptions{SkipBrokenSymlinks: true}); err != nil {
		t.Fatalf("AddPath should not error with SkipBrokenSymlinks: %v", err)
	}

	if _, ok := dn.files["real.txt"]; !ok {
		t.Error("real.txt should be present")
	}
	if _, ok := dn.files["broken.txt"]; ok {
		t.Error("broken.txt should have been skipped")
	}
}
// TestAddPath_ExcludePatterns_Good checks that glob patterns filter out
// matching basenames while unrelated files are still ingested.
func TestAddPath_ExcludePatterns_Good(t *testing.T) {
	root := t.TempDir()
	seed := map[string]string{
		"app.go":    "package main",
		"debug.log": "log data",
		"error.log": "error data",
	}
	for name, body := range seed {
		if err := os.WriteFile(filepath.Join(root, name), []byte(body), 0644); err != nil {
			t.Fatal(err)
		}
	}

	dn := New()
	if err := dn.AddPath(root, AddPathOptions{ExcludePatterns: []string{"*.log"}}); err != nil {
		t.Fatalf("AddPath failed: %v", err)
	}

	if _, ok := dn.files["app.go"]; !ok {
		t.Error("app.go should be present")
	}
	if _, ok := dn.files["debug.log"]; ok {
		t.Error("debug.log should have been excluded")
	}
	if _, ok := dn.files["error.log"]; ok {
		t.Error("error.log should have been excluded")
	}
}
// TestAddPath_Bad ensures AddPath reports an error when the directory
// does not exist rather than silently succeeding.
func TestAddPath_Bad(t *testing.T) {
	if err := New().AddPath("/nonexistent/path/that/does/not/exist", AddPathOptions{}); err == nil {
		t.Fatal("expected error for nonexistent directory, got nil")
	}
}
// TestAddPath_ValidSymlink_Good exercises both symlink-handling modes of
// AddPath on a link whose target exists: by default the link is stored as
// a symlink entry preserving its target path, and with FollowSymlinks the
// link is stored as a regular file holding the target's content.
func TestAddPath_ValidSymlink_Good(t *testing.T) {
	if runtime.GOOS == "windows" {
		t.Skip("symlinks not reliably supported on Windows")
	}
	dir := t.TempDir()
	// Create a real file.
	if err := os.WriteFile(filepath.Join(dir, "target.txt"), []byte("target content"), 0644); err != nil {
		t.Fatal(err)
	}
	// Create a valid symlink pointing to the real file.
	if err := os.Symlink("target.txt", filepath.Join(dir, "link.txt")); err != nil {
		t.Fatal(err)
	}
	// Default behavior (FollowSymlinks=false): store as symlink.
	dn := New()
	err := dn.AddPath(dir, AddPathOptions{})
	if err != nil {
		t.Fatalf("AddPath failed: %v", err)
	}
	// The target file should be a regular file.
	targetFile, ok := dn.files["target.txt"]
	if !ok {
		t.Fatal("target.txt not found")
	}
	if targetFile.isSymlink() {
		t.Error("target.txt should not be a symlink")
	}
	if string(targetFile.content) != "target content" {
		t.Errorf("expected content 'target content', got %q", targetFile.content)
	}
	// The symlink should be stored as a symlink entry.
	linkFile, ok := dn.files["link.txt"]
	if !ok {
		t.Fatal("link.txt not found")
	}
	if !linkFile.isSymlink() {
		t.Error("link.txt should be a symlink")
	}
	// The stored target is the link's literal (relative) target path.
	if linkFile.symlink != "target.txt" {
		t.Errorf("expected symlink target 'target.txt', got %q", linkFile.symlink)
	}
	// Test with FollowSymlinks=true: store as regular file with target content.
	dn2 := New()
	err = dn2.AddPath(dir, AddPathOptions{FollowSymlinks: true})
	if err != nil {
		t.Fatalf("AddPath with FollowSymlinks failed: %v", err)
	}
	linkFile2, ok := dn2.files["link.txt"]
	if !ok {
		t.Fatal("link.txt not found with FollowSymlinks")
	}
	if linkFile2.isSymlink() {
		t.Error("link.txt should NOT be a symlink when FollowSymlinks is true")
	}
	if string(linkFile2.content) != "target content" {
		t.Errorf("expected content 'target content', got %q", linkFile2.content)
	}
}

View file

@ -8,7 +8,6 @@ import (
"io/fs"
"os"
"path"
"path/filepath"
"sort"
"strings"
"time"
@ -43,15 +42,12 @@ func FromTar(tarball []byte) (*DataNode, error) {
return nil, err
}
switch header.Typeflag {
case tar.TypeReg:
if header.Typeflag == tar.TypeReg {
data, err := io.ReadAll(tarReader)
if err != nil {
return nil, err
}
dn.AddData(header.Name, data)
case tar.TypeSymlink:
dn.AddSymlink(header.Name, header.Linkname)
}
}
@ -64,30 +60,17 @@ func (d *DataNode) ToTar() ([]byte, error) {
tw := tar.NewWriter(buf)
for _, file := range d.files {
var hdr *tar.Header
if file.isSymlink() {
hdr = &tar.Header{
Typeflag: tar.TypeSymlink,
Name: file.name,
Linkname: file.symlink,
Mode: 0777,
ModTime: file.modTime,
}
} else {
hdr = &tar.Header{
Name: file.name,
Mode: 0600,
Size: int64(len(file.content)),
ModTime: file.modTime,
}
hdr := &tar.Header{
Name: file.name,
Mode: 0600,
Size: int64(len(file.content)),
ModTime: file.modTime,
}
if err := tw.WriteHeader(hdr); err != nil {
return nil, err
}
if !file.isSymlink() {
if _, err := tw.Write(file.content); err != nil {
return nil, err
}
if _, err := tw.Write(file.content); err != nil {
return nil, err
}
}
@ -98,51 +81,6 @@ func (d *DataNode) ToTar() ([]byte, error) {
return buf.Bytes(), nil
}
// ToTarWriter streams the DataNode contents as a tar archive to w.
// File keys are written in sorted order so the output is deterministic.
// Symlink entries get a header only (mode 0777, Linkname set); regular
// files get a header (mode 0600) followed by their content.
//
// The tar writer's Close error is propagated: Close flushes the archive
// footer, so a deferred-and-ignored Close (as the previous version did)
// could report success for a truncated archive.
func (d *DataNode) ToTarWriter(w io.Writer) (err error) {
	tw := tar.NewWriter(w)
	// Always close the tar writer; on the success path surface its
	// error (footer flush failure) instead of discarding it.
	defer func() {
		if cerr := tw.Close(); err == nil {
			err = cerr
		}
	}()

	// Sort keys for deterministic output.
	keys := make([]string, 0, len(d.files))
	for k := range d.files {
		keys = append(keys, k)
	}
	sort.Strings(keys)

	for _, k := range keys {
		file := d.files[k]
		var hdr *tar.Header
		if file.isSymlink() {
			hdr = &tar.Header{
				Typeflag: tar.TypeSymlink,
				Name:     file.name,
				Linkname: file.symlink,
				Mode:     0777,
				ModTime:  file.modTime,
			}
		} else {
			hdr = &tar.Header{
				Name:    file.name,
				Mode:    0600,
				Size:    int64(len(file.content)),
				ModTime: file.modTime,
			}
		}
		if err := tw.WriteHeader(hdr); err != nil {
			return err
		}
		// Symlinks carry no body; writing one would corrupt the archive.
		if !file.isSymlink() {
			if _, err := tw.Write(file.content); err != nil {
				return err
			}
		}
	}
	return nil
}
// AddData adds a file to the DataNode.
func (d *DataNode) AddData(name string, content []byte) {
name = strings.TrimPrefix(name, "/")
@ -161,119 +99,6 @@ func (d *DataNode) AddData(name string, content []byte) {
}
}
// AddSymlink records a symbolic-link entry named name that points at
// target. A leading "/" is stripped from name; an empty name or a name
// ending in "/" (a directory) is silently ignored. The entry's modTime
// is set to the time of the call.
func (d *DataNode) AddSymlink(name, target string) {
	name = strings.TrimPrefix(name, "/")
	if name == "" || strings.HasSuffix(name, "/") {
		return
	}
	d.files[name] = &dataFile{
		name:    name,
		symlink: target,
		modTime: time.Now(),
	}
}
// AddPathOptions configures the behaviour of AddPath.
type AddPathOptions struct {
	SkipBrokenSymlinks bool     // skip broken symlinks instead of erroring out of the walk
	FollowSymlinks     bool     // follow symlinks and store target content (default false = store as symlinks)
	ExcludePatterns    []string // filepath.Match glob patterns to exclude (matched against basename only, not full paths)
}
// AddPath walks a real directory and adds its files to the DataNode.
// Paths are stored relative to dir, normalized with forward slashes.
// Directories are implicit and not stored.
//
// Symlink handling depends on opts: by default a link is stored as a
// symlink entry with its literal target; with FollowSymlinks the target's
// content is stored as a regular file; broken links either abort the walk
// or are skipped when SkipBrokenSymlinks is set.
//
// NOTE(review): ExcludePatterns is only applied to file basenames, so a
// pattern naming a directory does not prune its subtree — confirm this is
// the intended semantics for callers.
func (d *DataNode) AddPath(dir string, opts AddPathOptions) error {
	absDir, err := filepath.Abs(dir)
	if err != nil {
		return err
	}
	return filepath.WalkDir(absDir, func(p string, entry fs.DirEntry, err error) error {
		if err != nil {
			return err
		}
		// Skip the root directory itself.
		if p == absDir {
			return nil
		}
		// Compute relative path and normalize to forward slashes.
		rel, err := filepath.Rel(absDir, p)
		if err != nil {
			return err
		}
		rel = filepath.ToSlash(rel)
		// Skip directories — they are implicit in DataNode.
		isSymlink := entry.Type()&fs.ModeSymlink != 0
		if entry.IsDir() {
			return nil
		}
		// Apply exclude patterns against basename.
		base := filepath.Base(p)
		for _, pattern := range opts.ExcludePatterns {
			matched, matchErr := filepath.Match(pattern, base)
			if matchErr != nil {
				return matchErr
			}
			if matched {
				return nil
			}
		}
		// Handle symlinks.
		if isSymlink {
			linkTarget, err := os.Readlink(p)
			if err != nil {
				return err
			}
			// Resolve the symlink target to check if it exists.
			// Relative targets are resolved against the link's directory.
			absTarget := linkTarget
			if !filepath.IsAbs(absTarget) {
				absTarget = filepath.Join(filepath.Dir(p), linkTarget)
			}
			_, statErr := os.Stat(absTarget)
			if statErr != nil {
				// Broken symlink.
				if opts.SkipBrokenSymlinks {
					return nil
				}
				return statErr
			}
			if opts.FollowSymlinks {
				// Read the target content and store as regular file.
				content, err := os.ReadFile(absTarget)
				if err != nil {
					return err
				}
				d.AddData(rel, content)
			} else {
				// Store as symlink, keeping the link's literal target.
				d.AddSymlink(rel, linkTarget)
			}
			return nil
		}
		// Regular file: read content and add.
		content, err := os.ReadFile(p)
		if err != nil {
			return err
		}
		d.AddData(rel, content)
		return nil
	})
}
// Open opens a file from the DataNode.
func (d *DataNode) Open(name string) (fs.File, error) {
name = strings.TrimPrefix(name, "/")
@ -474,11 +299,8 @@ type dataFile struct {
name string
content []byte
modTime time.Time
symlink string
}
func (d *dataFile) isSymlink() bool { return d.symlink != "" }
func (d *dataFile) Stat() (fs.FileInfo, error) { return &dataFileInfo{file: d}, nil }
func (d *dataFile) Read(p []byte) (int, error) { return 0, io.EOF }
func (d *dataFile) Close() error { return nil }
@ -488,12 +310,7 @@ type dataFileInfo struct{ file *dataFile }
func (d *dataFileInfo) Name() string { return path.Base(d.file.name) }
func (d *dataFileInfo) Size() int64 { return int64(len(d.file.content)) }
func (d *dataFileInfo) Mode() fs.FileMode {
if d.file.isSymlink() {
return os.ModeSymlink | 0777
}
return 0444
}
func (d *dataFileInfo) Mode() fs.FileMode { return 0444 }
func (d *dataFileInfo) ModTime() time.Time { return d.file.modTime }
func (d *dataFileInfo) IsDir() bool { return false }
func (d *dataFileInfo) Sys() interface{} { return nil }

View file

@ -580,273 +580,6 @@ func TestFromTar_Bad(t *testing.T) {
}
}
// TestAddSymlink_Good verifies AddSymlink stores a symlink entry with the
// given target and that Stat reports ModeSymlink for it.
func TestAddSymlink_Good(t *testing.T) {
	dn := New()
	dn.AddSymlink("link.txt", "target.txt")

	entry, ok := dn.files["link.txt"]
	if !ok {
		t.Fatal("symlink not found in datanode")
	}
	if entry.symlink != "target.txt" {
		t.Errorf("expected symlink target 'target.txt', got %q", entry.symlink)
	}
	if !entry.isSymlink() {
		t.Error("expected isSymlink() to return true")
	}

	// Stat must surface the symlink bit in the reported mode.
	info, err := dn.Stat("link.txt")
	if err != nil {
		t.Fatalf("Stat failed: %v", err)
	}
	if info.Mode()&os.ModeSymlink == 0 {
		t.Error("expected ModeSymlink to be set in file mode")
	}
}
// TestSymlinkTarRoundTrip_Good checks that a DataNode containing a regular
// file plus a symlink serializes to a tar archive with correct entry types
// (TypeReg / TypeSymlink, Linkname, mode) and that FromTar restores both
// entries, including the symlink's target.
func TestSymlinkTarRoundTrip_Good(t *testing.T) {
	dn1 := New()
	dn1.AddData("real.txt", []byte("real content"))
	dn1.AddSymlink("link.txt", "real.txt")

	tarball, err := dn1.ToTar()
	if err != nil {
		t.Fatalf("ToTar failed: %v", err)
	}

	// Verify the tar contains a symlink entry
	tr := tar.NewReader(bytes.NewReader(tarball))
	foundSymlink := false
	foundFile := false
	for {
		header, err := tr.Next()
		if err == io.EOF {
			break
		}
		if err != nil {
			t.Fatalf("tar.Next failed: %v", err)
		}
		switch header.Name {
		case "link.txt":
			foundSymlink = true
			if header.Typeflag != tar.TypeSymlink {
				t.Errorf("expected TypeSymlink, got %d", header.Typeflag)
			}
			if header.Linkname != "real.txt" {
				t.Errorf("expected Linkname 'real.txt', got %q", header.Linkname)
			}
			// Symlinks are emitted with mode 0777 by convention.
			if header.Mode != 0777 {
				t.Errorf("expected mode 0777, got %o", header.Mode)
			}
		case "real.txt":
			foundFile = true
			if header.Typeflag != tar.TypeReg {
				t.Errorf("expected TypeReg for real.txt, got %d", header.Typeflag)
			}
		}
	}
	if !foundSymlink {
		t.Error("symlink entry not found in tarball")
	}
	if !foundFile {
		t.Error("regular file entry not found in tarball")
	}

	// Round-trip: FromTar should restore the symlink
	dn2, err := FromTar(tarball)
	if err != nil {
		t.Fatalf("FromTar failed: %v", err)
	}

	// Verify the regular file survived
	exists, _ := dn2.Exists("real.txt")
	if !exists {
		t.Error("real.txt missing after round-trip")
	}

	// Verify the symlink survived
	linkFile, ok := dn2.files["link.txt"]
	if !ok {
		t.Fatal("link.txt missing after round-trip")
	}
	if !linkFile.isSymlink() {
		t.Error("expected link.txt to be a symlink after round-trip")
	}
	if linkFile.symlink != "real.txt" {
		t.Errorf("expected symlink target 'real.txt', got %q", linkFile.symlink)
	}

	// Stat should still report ModeSymlink
	info, err := dn2.Stat("link.txt")
	if err != nil {
		t.Fatalf("Stat failed: %v", err)
	}
	if info.Mode()&os.ModeSymlink == 0 {
		t.Error("expected ModeSymlink after round-trip")
	}
}
// TestAddSymlink_Bad exercises AddSymlink's input normalization: empty names
// are dropped, a leading slash is stripped, and directory-like names (with a
// trailing slash) are rejected.
func TestAddSymlink_Bad(t *testing.T) {
	node := New()

	// An empty name must be a no-op.
	node.AddSymlink("", "target.txt")
	if len(node.files) != 0 {
		t.Error("expected empty name to be ignored")
	}

	// A leading slash is normalized away before storage.
	node.AddSymlink("/link.txt", "target.txt")
	if _, found := node.files["link.txt"]; !found {
		t.Error("expected leading slash to be stripped")
	}

	// Names that look like directories must not create entries.
	other := New()
	other.AddSymlink("dir/", "target")
	if len(other.files) != 0 {
		t.Error("expected directory-like name to be ignored")
	}
}
// TestToTarWriter_Good streams two files through ToTarWriter, verifies their
// contents round-trip via FromTar, and checks that entries are written in
// deterministic (sorted) order.
func TestToTarWriter_Good(t *testing.T) {
	dn := New()
	dn.AddData("foo.txt", []byte("hello"))
	dn.AddData("bar/baz.txt", []byte("world"))

	var buf bytes.Buffer
	if err := dn.ToTarWriter(&buf); err != nil {
		t.Fatalf("ToTarWriter failed: %v", err)
	}

	// Round-trip through FromTar to verify contents survived.
	dn2, err := FromTar(buf.Bytes())
	if err != nil {
		t.Fatalf("FromTar failed: %v", err)
	}

	// Verify foo.txt
	f1, ok := dn2.files["foo.txt"]
	if !ok {
		t.Fatal("foo.txt missing after round-trip")
	}
	if string(f1.content) != "hello" {
		t.Errorf("expected foo.txt content 'hello', got %q", f1.content)
	}

	// Verify bar/baz.txt
	f2, ok := dn2.files["bar/baz.txt"]
	if !ok {
		t.Fatal("bar/baz.txt missing after round-trip")
	}
	if string(f2.content) != "world" {
		t.Errorf("expected bar/baz.txt content 'world', got %q", f2.content)
	}

	// Verify deterministic ordering: bar/baz.txt should come before foo.txt.
	tr := tar.NewReader(bytes.NewReader(buf.Bytes()))
	header1, err := tr.Next()
	if err != nil {
		t.Fatalf("tar.Next failed: %v", err)
	}
	header2, err := tr.Next()
	if err != nil {
		t.Fatalf("tar.Next failed: %v", err)
	}
	if header1.Name != "bar/baz.txt" || header2.Name != "foo.txt" {
		t.Errorf("expected sorted order [bar/baz.txt, foo.txt], got [%s, %s]",
			header1.Name, header2.Name)
	}
}
// TestToTarWriter_Symlinks_Good verifies that ToTarWriter preserves symlinks:
// the archive round-trips through FromTar with the link target intact, and the
// raw tar entries carry the expected type flags and modes (0777 for symlinks,
// 0600 for regular files).
func TestToTarWriter_Symlinks_Good(t *testing.T) {
	dn := New()
	dn.AddData("real.txt", []byte("real content"))
	dn.AddSymlink("link.txt", "real.txt")

	var buf bytes.Buffer
	if err := dn.ToTarWriter(&buf); err != nil {
		t.Fatalf("ToTarWriter failed: %v", err)
	}

	// Round-trip through FromTar.
	dn2, err := FromTar(buf.Bytes())
	if err != nil {
		t.Fatalf("FromTar failed: %v", err)
	}

	// Verify regular file survived.
	realFile, ok := dn2.files["real.txt"]
	if !ok {
		t.Fatal("real.txt missing after round-trip")
	}
	if string(realFile.content) != "real content" {
		t.Errorf("expected 'real content', got %q", realFile.content)
	}

	// Verify symlink survived.
	linkFile, ok := dn2.files["link.txt"]
	if !ok {
		t.Fatal("link.txt missing after round-trip")
	}
	if !linkFile.isSymlink() {
		t.Error("expected link.txt to be a symlink")
	}
	if linkFile.symlink != "real.txt" {
		t.Errorf("expected symlink target 'real.txt', got %q", linkFile.symlink)
	}

	// Also verify the raw tar entries have correct types and modes.
	tr := tar.NewReader(bytes.NewReader(buf.Bytes()))
	for {
		header, err := tr.Next()
		if err == io.EOF {
			break
		}
		if err != nil {
			t.Fatalf("tar.Next failed: %v", err)
		}
		switch header.Name {
		case "link.txt":
			if header.Typeflag != tar.TypeSymlink {
				t.Errorf("expected TypeSymlink for link.txt, got %d", header.Typeflag)
			}
			if header.Linkname != "real.txt" {
				t.Errorf("expected Linkname 'real.txt', got %q", header.Linkname)
			}
			if header.Mode != 0777 {
				t.Errorf("expected mode 0777 for symlink, got %o", header.Mode)
			}
		case "real.txt":
			if header.Typeflag != tar.TypeReg {
				t.Errorf("expected TypeReg for real.txt, got %d", header.Typeflag)
			}
			if header.Mode != 0600 {
				t.Errorf("expected mode 0600 for regular file, got %o", header.Mode)
			}
		}
	}
}
// TestToTarWriter_Empty_Good confirms that an empty DataNode serializes to a
// valid (empty) tar archive that round-trips back to zero files.
func TestToTarWriter_Empty_Good(t *testing.T) {
	node := New()

	var out bytes.Buffer
	if err := node.ToTarWriter(&out); err != nil {
		t.Fatalf("ToTarWriter on empty DataNode should not error, got: %v", err)
	}

	// The archive must still be parseable and contain nothing.
	restored, err := FromTar(out.Bytes())
	if err != nil {
		t.Fatalf("FromTar on empty tar failed: %v", err)
	}
	if len(restored.files) != 0 {
		t.Errorf("expected 0 files in empty round-trip, got %d", len(restored.files))
	}
}
func toSortedNames(entries []fs.DirEntry) []string {
var names []string
for _, e := range entries {

View file

@ -8,6 +8,7 @@ import (
"os"
"strings"
"github.com/Snider/Borg/pkg/datanode"
"golang.org/x/oauth2"
)
@ -18,6 +19,8 @@ type Repo struct {
// GithubClient is an interface for interacting with the Github API.
type GithubClient interface {
GetPublicRepos(ctx context.Context, userOrOrg string) ([]string, error)
GetIssues(ctx context.Context, owner, repo string) (*datanode.DataNode, error)
GetPullRequests(ctx context.Context, owner, repo string) (*datanode.DataNode, error)
}
// NewGithubClient creates a new GithubClient.
@ -25,7 +28,9 @@ func NewGithubClient() GithubClient {
return &githubClient{}
}
type githubClient struct{}
type githubClient struct {
apiURL string
}
// NewAuthenticatedClient creates a new authenticated http client.
var NewAuthenticatedClient = func(ctx context.Context) *http.Client {

View file

@ -8,7 +8,7 @@ import (
"strings"
"testing"
"forge.lthn.ai/Snider/Borg/pkg/mocks"
"github.com/Snider/Borg/pkg/mocks"
)
func TestGetPublicRepos_Good(t *testing.T) {

156
pkg/github/issue.go Normal file
View file

@ -0,0 +1,156 @@
package github
import (
"context"
"encoding/json"
"fmt"
"net/http"
"strings"
"time"
"github.com/Snider/Borg/pkg/datanode"
)
// Issue models the subset of a GitHub REST API issue object that Borg
// collects; struct tags map each field onto the API's JSON key.
type Issue struct {
	Number    int       `json:"number"`
	Title     string    `json:"title"`
	Body      string    `json:"body"`
	State     string    `json:"state"`
	CreatedAt time.Time `json:"created_at"`
	UpdatedAt time.Time `json:"updated_at"`
	// User is the issue author.
	User struct {
		Login string `json:"login"`
	} `json:"user"`
	Labels []struct {
		Name string `json:"name"`
	} `json:"labels"`
	// CommentsURL is the API endpoint for this issue's comment thread.
	CommentsURL string `json:"comments_url"`
}
// Comment is a single comment on an issue, as returned by the GitHub API.
type Comment struct {
	Body      string    `json:"body"`
	CreatedAt time.Time `json:"created_at"`
	// User is the comment author.
	User struct {
		Login string `json:"login"`
	} `json:"user"`
}
// GetIssues fetches all issues for owner/repo (following Link-header
// pagination), renders each issue and its comments to Markdown at
// "issues/<number>.md", and stores the raw metadata as "issues/INDEX.json".
//
// Returns the populated DataNode, or an error on any network, HTTP-status,
// or decode failure.
func (g *githubClient) GetIssues(ctx context.Context, owner, repo string) (*datanode.DataNode, error) {
	dn := datanode.New()
	client := NewAuthenticatedClient(ctx)

	// g.apiURL overrides the default endpoint (used by tests).
	apiURL := "https://api.github.com"
	if g.apiURL != "" {
		apiURL = g.apiURL
	}
	url := fmt.Sprintf("%s/repos/%s/%s/issues", apiURL, owner, repo)

	var allIssues []Issue
	for url != "" {
		req, err := http.NewRequestWithContext(ctx, "GET", url, nil)
		if err != nil {
			return nil, err
		}
		req.Header.Set("User-Agent", "Borg-Data-Collector")

		resp, err := client.Do(req)
		if err != nil {
			return nil, err
		}
		if resp.StatusCode != http.StatusOK {
			resp.Body.Close()
			return nil, fmt.Errorf("failed to fetch issues: %s", resp.Status)
		}

		var issues []Issue
		decodeErr := json.NewDecoder(resp.Body).Decode(&issues)
		// Bug fix: close the body on every path, not only the non-200 branch.
		// Leaked bodies prevent the transport from reusing connections.
		resp.Body.Close()
		if decodeErr != nil {
			return nil, decodeErr
		}
		allIssues = append(allIssues, issues...)

		// Follow RFC 5988 pagination; findNextURL returns "" on the last page.
		url = g.findNextURL(resp.Header.Get("Link"))
	}

	for _, issue := range allIssues {
		var markdown strings.Builder
		markdown.WriteString(fmt.Sprintf("# Issue %d: %s\n\n", issue.Number, issue.Title))
		markdown.WriteString(fmt.Sprintf("**Author**: %s\n", issue.User.Login))
		markdown.WriteString(fmt.Sprintf("**State**: %s\n", issue.State))
		markdown.WriteString(fmt.Sprintf("**Created**: %s\n", issue.CreatedAt.Format(time.RFC1123)))
		markdown.WriteString(fmt.Sprintf("**Updated**: %s\n\n", issue.UpdatedAt.Format(time.RFC1123)))

		if len(issue.Labels) > 0 {
			markdown.WriteString("**Labels**:\n")
			for _, label := range issue.Labels {
				markdown.WriteString(fmt.Sprintf("- %s\n", label.Name))
			}
			markdown.WriteString("\n")
		}

		markdown.WriteString("## Body\n\n")
		markdown.WriteString(issue.Body)
		markdown.WriteString("\n\n")

		// Fetch comments
		comments, err := g.getComments(ctx, issue.CommentsURL)
		if err != nil {
			return nil, err
		}
		if len(comments) > 0 {
			markdown.WriteString("## Comments\n\n")
			for _, comment := range comments {
				markdown.WriteString(fmt.Sprintf("**%s** commented on %s:\n\n", comment.User.Login, comment.CreatedAt.Format(time.RFC1123)))
				markdown.WriteString(comment.Body)
				markdown.WriteString("\n\n---\n\n")
			}
		}

		filename := fmt.Sprintf("issues/%d.md", issue.Number)
		dn.AddData(filename, []byte(markdown.String()))
	}

	// Add an index file
	index, err := json.MarshalIndent(allIssues, "", " ")
	if err != nil {
		return nil, err
	}
	dn.AddData("issues/INDEX.json", index)

	return dn, nil
}
// getComments follows the paginated comments URL for an issue and returns all
// comments across pages (pagination via the HTTP Link header).
func (g *githubClient) getComments(ctx context.Context, url string) ([]Comment, error) {
	client := NewAuthenticatedClient(ctx)

	var allComments []Comment
	for url != "" {
		req, err := http.NewRequestWithContext(ctx, "GET", url, nil)
		if err != nil {
			return nil, err
		}
		req.Header.Set("User-Agent", "Borg-Data-Collector")

		resp, err := client.Do(req)
		if err != nil {
			return nil, err
		}
		if resp.StatusCode != http.StatusOK {
			resp.Body.Close()
			return nil, fmt.Errorf("failed to fetch comments: %s", resp.Status)
		}

		var comments []Comment
		decodeErr := json.NewDecoder(resp.Body).Decode(&comments)
		// Bug fix: close the body on every path; the original leaked it on
		// success and on decode failure.
		resp.Body.Close()
		if decodeErr != nil {
			return nil, decodeErr
		}
		allComments = append(allComments, comments...)

		url = g.findNextURL(resp.Header.Get("Link"))
	}
	return allComments, nil
}

67
pkg/github/issue_test.go Normal file
View file

@ -0,0 +1,67 @@
package github
import (
"context"
"encoding/json"
"io/fs"
"net/http"
"net/http/httptest"
"testing"
"github.com/stretchr/testify/assert"
)
// TestGetIssues exercises GetIssues against a stub GitHub API server and
// verifies the exact set of files written into the returned DataNode.
func TestGetIssues(t *testing.T) {
	server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		switch r.URL.Path {
		case "/repos/owner/repo/issues":
			w.Header().Set("Content-Type", "application/json")
			issues := []Issue{
				{Number: 1, Title: "Issue 1", CommentsURL: "http://" + r.Host + "/repos/owner/repo/issues/1/comments"},
				{Number: 2, Title: "Issue 2", CommentsURL: "http://" + r.Host + "/repos/owner/repo/issues/2/comments"},
			}
			json.NewEncoder(w).Encode(issues)
		case "/repos/owner/repo/issues/1/comments":
			w.Header().Set("Content-Type", "application/json")
			comments := []Comment{
				{Body: "Comment 1"},
			}
			json.NewEncoder(w).Encode(comments)
		case "/repos/owner/repo/issues/2/comments":
			w.Header().Set("Content-Type", "application/json")
			w.Write([]byte("[]"))
		default:
			http.NotFound(w, r)
		}
	}))
	defer server.Close()

	// Route the package's HTTP client through the stub server.
	originalNewAuthenticatedClient := NewAuthenticatedClient
	NewAuthenticatedClient = func(ctx context.Context) *http.Client {
		return server.Client()
	}
	defer func() {
		NewAuthenticatedClient = originalNewAuthenticatedClient
	}()

	client := &githubClient{apiURL: server.URL}
	dn, err := client.GetIssues(context.Background(), "owner", "repo")
	assert.NoError(t, err)
	assert.NotNil(t, dn)

	expectedFiles := []string{
		"issues/1.md",
		"issues/2.md",
		"issues/INDEX.json",
	}
	actualFiles := []string{}
	// Bug fix: propagate walk errors instead of silently ignoring both the
	// callback's err argument and Walk's return value.
	walkErr := dn.Walk(".", func(path string, de fs.DirEntry, err error) error {
		if err != nil {
			return err
		}
		if !de.IsDir() {
			actualFiles = append(actualFiles, path)
		}
		return nil
	})
	assert.NoError(t, walkErr)
	assert.ElementsMatch(t, expectedFiles, actualFiles)
}

201
pkg/github/pull_request.go Normal file
View file

@ -0,0 +1,201 @@
package github
import (
"context"
"encoding/json"
"fmt"
"io"
"net/http"
"strings"
"time"
"github.com/Snider/Borg/pkg/datanode"
)
// PullRequest models the subset of a GitHub REST API pull-request object that
// Borg collects; struct tags map each field onto the API's JSON key.
type PullRequest struct {
	Number    int       `json:"number"`
	Title     string    `json:"title"`
	Body      string    `json:"body"`
	State     string    `json:"state"`
	CreatedAt time.Time `json:"created_at"`
	UpdatedAt time.Time `json:"updated_at"`
	// MergedAt is the zero time for unmerged PRs (JSON null).
	MergedAt time.Time `json:"merged_at"`
	// User is the PR author.
	User struct {
		Login string `json:"login"`
	} `json:"user"`
	Labels []struct {
		Name string `json:"name"`
	} `json:"labels"`
	// Links holds the HATEOAS URLs for the PR's comment threads.
	Links struct {
		Comments struct {
			Href string `json:"href"`
		} `json:"comments"`
		ReviewComments struct {
			Href string `json:"href"`
		} `json:"review_comments"`
	} `json:"_links"`
	// DiffURL points at the raw unified diff for this PR.
	DiffURL string `json:"diff_url"`
}
// ReviewComment is a single inline code-review comment on a pull request,
// anchored to a file path within the diff.
type ReviewComment struct {
	Body      string    `json:"body"`
	Path      string    `json:"path"`
	CreatedAt time.Time `json:"created_at"`
	// User is the comment author.
	User struct {
		Login string `json:"login"`
	} `json:"user"`
}
// GetPullRequests fetches every pull request (open and closed) for owner/repo,
// following Link-header pagination. For each PR it stores the raw diff at
// "pulls/<n>.diff" and a Markdown summary (metadata, body, review comments) at
// "pulls/<n>.md", plus a JSON index at "pulls/INDEX.json".
func (g *githubClient) GetPullRequests(ctx context.Context, owner, repo string) (*datanode.DataNode, error) {
	dn := datanode.New()
	client := NewAuthenticatedClient(ctx)

	// g.apiURL overrides the default endpoint (used by tests).
	apiURL := "https://api.github.com"
	if g.apiURL != "" {
		apiURL = g.apiURL
	}
	// Get both open and closed pull requests
	url := fmt.Sprintf("%s/repos/%s/%s/pulls?state=all", apiURL, owner, repo)

	var allPRs []PullRequest
	for url != "" {
		req, err := http.NewRequestWithContext(ctx, "GET", url, nil)
		if err != nil {
			return nil, err
		}
		req.Header.Set("User-Agent", "Borg-Data-Collector")

		resp, err := client.Do(req)
		if err != nil {
			return nil, err
		}
		if resp.StatusCode != http.StatusOK {
			resp.Body.Close()
			return nil, fmt.Errorf("failed to fetch pull requests: %s", resp.Status)
		}

		var prs []PullRequest
		decodeErr := json.NewDecoder(resp.Body).Decode(&prs)
		// Bug fix: close the body on every path, not only the non-200 branch.
		// Leaked bodies prevent HTTP connection reuse.
		resp.Body.Close()
		if decodeErr != nil {
			return nil, decodeErr
		}
		allPRs = append(allPRs, prs...)

		url = g.findNextURL(resp.Header.Get("Link"))
	}

	for _, pr := range allPRs {
		var markdown strings.Builder
		markdown.WriteString(fmt.Sprintf("# PR %d: %s\n\n", pr.Number, pr.Title))
		markdown.WriteString(fmt.Sprintf("**Author**: %s\n", pr.User.Login))
		markdown.WriteString(fmt.Sprintf("**State**: %s\n", pr.State))
		markdown.WriteString(fmt.Sprintf("**Created**: %s\n", pr.CreatedAt.Format(time.RFC1123)))
		markdown.WriteString(fmt.Sprintf("**Updated**: %s\n", pr.UpdatedAt.Format(time.RFC1123)))
		// Zero MergedAt means the PR was never merged; omit the line.
		if !pr.MergedAt.IsZero() {
			markdown.WriteString(fmt.Sprintf("**Merged**: %s\n", pr.MergedAt.Format(time.RFC1123)))
		}
		markdown.WriteString(fmt.Sprintf("\n**[View Diff](%s)**\n\n", pr.DiffURL))

		if len(pr.Labels) > 0 {
			markdown.WriteString("**Labels**:\n")
			for _, label := range pr.Labels {
				markdown.WriteString(fmt.Sprintf("- %s\n", label.Name))
			}
			markdown.WriteString("\n")
		}

		markdown.WriteString("## Body\n\n")
		markdown.WriteString(pr.Body)
		markdown.WriteString("\n\n")

		// Fetch diff
		diff, err := g.getDiff(ctx, pr.DiffURL)
		if err != nil {
			return nil, fmt.Errorf("failed to get diff for PR #%d: %w", pr.Number, err)
		}
		dn.AddData(fmt.Sprintf("pulls/%d.diff", pr.Number), diff)

		// Fetch review comments
		reviewComments, err := g.getReviewComments(ctx, pr.Links.ReviewComments.Href)
		if err != nil {
			return nil, err
		}
		if len(reviewComments) > 0 {
			markdown.WriteString("## Review Comments\n\n")
			for _, comment := range reviewComments {
				markdown.WriteString(fmt.Sprintf("**%s** commented on `%s` at %s:\n\n", comment.User.Login, comment.Path, comment.CreatedAt.Format(time.RFC1123)))
				markdown.WriteString(comment.Body)
				markdown.WriteString("\n\n---\n\n")
			}
		}

		filename := fmt.Sprintf("pulls/%d.md", pr.Number)
		dn.AddData(filename, []byte(markdown.String()))
	}

	// Add an index file
	index, err := json.MarshalIndent(allPRs, "", " ")
	if err != nil {
		return nil, err
	}
	dn.AddData("pulls/INDEX.json", index)

	return dn, nil
}
// getReviewComments follows the paginated review-comments URL for a pull
// request and returns all comments across pages (Link-header pagination).
func (g *githubClient) getReviewComments(ctx context.Context, url string) ([]ReviewComment, error) {
	client := NewAuthenticatedClient(ctx)

	var allComments []ReviewComment
	for url != "" {
		req, err := http.NewRequestWithContext(ctx, "GET", url, nil)
		if err != nil {
			return nil, err
		}
		req.Header.Set("User-Agent", "Borg-Data-Collector")

		resp, err := client.Do(req)
		if err != nil {
			return nil, err
		}
		if resp.StatusCode != http.StatusOK {
			resp.Body.Close()
			return nil, fmt.Errorf("failed to fetch review comments: %s", resp.Status)
		}

		var comments []ReviewComment
		decodeErr := json.NewDecoder(resp.Body).Decode(&comments)
		// Bug fix: close the body on every path; the original leaked it on
		// success and on decode failure.
		resp.Body.Close()
		if decodeErr != nil {
			return nil, decodeErr
		}
		allComments = append(allComments, comments...)

		url = g.findNextURL(resp.Header.Get("Link"))
	}
	return allComments, nil
}
// getDiff downloads the raw unified diff for a pull request from the given
// URL using the GitHub v3 diff media type.
func (g *githubClient) getDiff(ctx context.Context, url string) ([]byte, error) {
	httpClient := NewAuthenticatedClient(ctx)

	request, err := http.NewRequestWithContext(ctx, "GET", url, nil)
	if err != nil {
		return nil, err
	}
	request.Header.Set("User-Agent", "Borg-Data-Collector")
	request.Header.Set("Accept", "application/vnd.github.v3.diff")

	response, err := httpClient.Do(request)
	if err != nil {
		return nil, err
	}
	defer response.Body.Close()

	if response.StatusCode != http.StatusOK {
		return nil, fmt.Errorf("failed to fetch diff: %s", response.Status)
	}
	return io.ReadAll(response.Body)
}

View file

@ -0,0 +1,91 @@
package github
import (
"context"
"encoding/json"
"io/fs"
"net/http"
"net/http/httptest"
"testing"
"github.com/stretchr/testify/assert"
)
// TestGetPullRequests exercises GetPullRequests against a stub GitHub API
// server and verifies the exact set of files written into the DataNode.
func TestGetPullRequests(t *testing.T) {
	server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		switch r.URL.Path {
		case "/repos/owner/repo/pulls":
			w.Header().Set("Content-Type", "application/json")
			prs := []PullRequest{
				{
					Number: 1, Title: "PR 1",
					DiffURL: "http://" + r.Host + "/repos/owner/repo/pulls/1.diff",
					Links: struct {
						Comments struct {
							Href string `json:"href"`
						} `json:"comments"`
						ReviewComments struct {
							Href string `json:"href"`
						} `json:"review_comments"`
					}{
						ReviewComments: struct {
							Href string `json:"href"`
						}{Href: "http://" + r.Host + "/repos/owner/repo/pulls/1/comments"},
					},
				},
				{
					Number: 2, Title: "PR 2",
					DiffURL: "http://" + r.Host + "/repos/owner/repo/pulls/2.diff",
					Links: struct {
						Comments struct {
							Href string `json:"href"`
						} `json:"comments"`
						ReviewComments struct {
							Href string `json:"href"`
						} `json:"review_comments"`
					}{
						ReviewComments: struct {
							Href string `json:"href"`
						}{Href: "http://" + r.Host + "/repos/owner/repo/pulls/2/comments"},
					},
				},
			}
			json.NewEncoder(w).Encode(prs)
		case "/repos/owner/repo/pulls/1.diff":
			w.Write([]byte("diff --git a/file b/file"))
		case "/repos/owner/repo/pulls/1/comments":
			w.Header().Set("Content-Type", "application/json")
			comments := []ReviewComment{
				{Body: "Review Comment 1"},
			}
			json.NewEncoder(w).Encode(comments)
		case "/repos/owner/repo/pulls/2.diff":
			w.Write([]byte("diff --git a/file2 b/file2"))
		case "/repos/owner/repo/pulls/2/comments":
			w.Header().Set("Content-Type", "application/json")
			w.Write([]byte("[]"))
		default:
			http.NotFound(w, r)
		}
	}))
	defer server.Close()

	// Route the package's HTTP client through the stub server.
	originalNewAuthenticatedClient := NewAuthenticatedClient
	NewAuthenticatedClient = func(ctx context.Context) *http.Client {
		return server.Client()
	}
	defer func() {
		NewAuthenticatedClient = originalNewAuthenticatedClient
	}()

	client := &githubClient{apiURL: server.URL}
	dn, err := client.GetPullRequests(context.Background(), "owner", "repo")
	assert.NoError(t, err)
	assert.NotNil(t, dn)

	expectedFiles := []string{
		"pulls/1.md",
		"pulls/1.diff",
		"pulls/2.md",
		"pulls/2.diff",
		"pulls/INDEX.json",
	}
	actualFiles := []string{}
	// Bug fix: propagate walk errors instead of silently ignoring both the
	// callback's err argument and Walk's return value.
	walkErr := dn.Walk(".", func(path string, de fs.DirEntry, err error) error {
		if err != nil {
			return err
		}
		if !de.IsDir() {
			actualFiles = append(actualFiles, path)
		}
		return nil
	})
	assert.NoError(t, walkErr)
	assert.ElementsMatch(t, expectedFiles, actualFiles)
}

View file

@ -8,7 +8,7 @@ import (
"net/url"
"testing"
"forge.lthn.ai/Snider/Borg/pkg/mocks"
"github.com/Snider/Borg/pkg/mocks"
"github.com/google/go-github/v39/github"
)

View file

@ -3,8 +3,8 @@ package mocks
import (
"io"
"forge.lthn.ai/Snider/Borg/pkg/datanode"
"forge.lthn.ai/Snider/Borg/pkg/vcs"
"github.com/Snider/Borg/pkg/datanode"
"github.com/Snider/Borg/pkg/vcs"
)
// MockGitCloner is a mock implementation of the GitCloner interface.

View file

@ -10,7 +10,7 @@ import (
"net/http"
"time"
"forge.lthn.ai/Snider/Borg/pkg/smsg"
"github.com/Snider/Borg/pkg/smsg"
)
// Player provides media decryption and playback services

View file

@ -11,7 +11,7 @@ import (
"strings"
"sync"
"forge.lthn.ai/Snider/Borg/pkg/datanode"
"github.com/Snider/Borg/pkg/datanode"
"github.com/schollz/progressbar/v3"
"golang.org/x/net/html"
)
@ -217,9 +217,7 @@ func (p *pwaClient) DownloadAndPackagePWA(pwaURL, manifestURL string, bar *progr
if path == "" {
path = "index.html"
}
mu.Lock()
dn.AddData(path, body)
mu.Unlock()
// Parse HTML for additional assets
if parseHTML && isHTMLContent(resp.Header.Get("Content-Type"), body) {

View file

@ -16,7 +16,7 @@ package smsg
// This means wrapped keys, encrypted payloads, etc. are self-contained.
// You only need the correct key to decrypt - no nonce management required.
//
// See: forge.lthn.ai/Snider/Enchantrix/pkg/enchantrix/crypto_sigil.go
// See: github.com/Snider/Enchantrix/pkg/enchantrix/crypto_sigil.go
import (
"bytes"
@ -29,8 +29,8 @@ import (
"io"
"time"
"forge.lthn.ai/Snider/Enchantrix/pkg/enchantrix"
"forge.lthn.ai/Snider/Enchantrix/pkg/trix"
"github.com/Snider/Enchantrix/pkg/enchantrix"
"github.com/Snider/Enchantrix/pkg/trix"
"github.com/klauspost/compress/zstd"
)

View file

@ -29,9 +29,9 @@ import (
"fmt"
"time"
"forge.lthn.ai/Snider/Enchantrix/pkg/crypt"
"forge.lthn.ai/Snider/Enchantrix/pkg/enchantrix"
"forge.lthn.ai/Snider/Enchantrix/pkg/trix"
"github.com/Snider/Enchantrix/pkg/crypt"
"github.com/Snider/Enchantrix/pkg/enchantrix"
"github.com/Snider/Enchantrix/pkg/trix"
)
// StreamParams contains the parameters needed for stream key derivation

View file

@ -7,8 +7,8 @@ import (
"encoding/json"
"fmt"
"forge.lthn.ai/Snider/Enchantrix/pkg/enchantrix"
"forge.lthn.ai/Snider/Enchantrix/pkg/trix"
"github.com/Snider/Enchantrix/pkg/enchantrix"
"github.com/Snider/Enchantrix/pkg/trix"
)
// Decrypt decrypts a STMF payload using the server's private key.

View file

@ -8,8 +8,8 @@ import (
"encoding/json"
"fmt"
"forge.lthn.ai/Snider/Enchantrix/pkg/enchantrix"
"forge.lthn.ai/Snider/Enchantrix/pkg/trix"
"github.com/Snider/Enchantrix/pkg/enchantrix"
"github.com/Snider/Enchantrix/pkg/trix"
)
// Encrypt encrypts form data using the server's public key.

View file

@ -7,7 +7,7 @@ import (
"net/http"
"net/url"
"forge.lthn.ai/Snider/Borg/pkg/stmf"
"github.com/Snider/Borg/pkg/stmf"
)
// contextKey is a custom type for context keys to avoid collisions

View file

@ -7,7 +7,7 @@ import (
"strings"
"testing"
"forge.lthn.ai/Snider/Borg/pkg/stmf"
"github.com/Snider/Borg/pkg/stmf"
)
func TestMiddleware(t *testing.T) {

View file

@ -1,6 +1,6 @@
package tim
import "forge.lthn.ai/Snider/Enchantrix/pkg/trix"
import "github.com/Snider/Enchantrix/pkg/trix"
// DefaultSpec returns a default runc spec.
func defaultConfig() (*trix.Trix, error) {

View file

@ -5,7 +5,7 @@ import (
"path/filepath"
"testing"
"forge.lthn.ai/Snider/Borg/pkg/trix"
"github.com/Snider/Borg/pkg/trix"
)
func TestToFromSigil(t *testing.T) {

View file

@ -1,198 +0,0 @@
package tim
import (
"crypto/rand"
"encoding/binary"
"errors"
"fmt"
"io"
"golang.org/x/crypto/argon2"
"golang.org/x/crypto/chacha20poly1305"
borgtrix "forge.lthn.ai/Snider/Borg/pkg/trix"
)
// Framing constants for the STIM v2 stream format.
const (
	blockSize  = 1024 * 1024 // 1 MiB plaintext blocks
	saltSize   = 16
	nonceSize  = 12 // chacha20poly1305.NonceSize
	lengthSize = 4
	headerSize = 33 // 4 (magic) + 1 (version) + 16 (salt) + 12 (argon2 params)
)
// stimMagic is the 4-byte stream signature; the Err* sentinels are returned
// by StreamDecrypt for malformed or tampered input.
var (
	stimMagic             = [4]byte{'S', 'T', 'I', 'M'}
	ErrInvalidMagic       = errors.New("invalid STIM magic header")
	ErrUnsupportedVersion = errors.New("unsupported STIM version")
	ErrStreamDecrypt      = errors.New("stream decryption failed")
)
// StreamEncrypt reads plaintext from r and writes STIM v2 chunked AEAD
// encrypted data to w. Each 1 MiB block is independently encrypted with
// ChaCha20-Poly1305 using a unique random nonce.
//
// Wire format:
//
//	header:  magic(4) | version(1) | salt(16) | argon2 params(12)
//	block:   nonce(12) | ciphertext length(4, little-endian) | ciphertext
//	trailer: nonce(12) | length = 0  (EOF marker)
func StreamEncrypt(r io.Reader, w io.Writer, password string) error {
	// Generate random salt
	salt := make([]byte, saltSize)
	if _, err := rand.Read(salt); err != nil {
		return fmt.Errorf("failed to generate salt: %w", err)
	}

	// Derive key using Argon2id with default params.
	// NOTE(review): params is written into the header below, while the key is
	// derived by DeriveKeyArgon2(password, salt). This assumes DeriveKeyArgon2
	// uses DefaultArgon2Params() internally; otherwise StreamDecrypt (which
	// derives from the header params) would compute a different key — confirm
	// against pkg/trix.
	params := borgtrix.DefaultArgon2Params()
	key := borgtrix.DeriveKeyArgon2(password, salt)

	// Create AEAD cipher
	aead, err := chacha20poly1305.New(key)
	if err != nil {
		return fmt.Errorf("failed to create AEAD: %w", err)
	}

	// Write header: magic(4) + version(1) + salt(16) + argon2params(12) = 33 bytes
	header := make([]byte, headerSize)
	copy(header[0:4], stimMagic[:])
	header[4] = 2 // version
	copy(header[5:21], salt)
	copy(header[21:33], params.Encode())
	if _, err := w.Write(header); err != nil {
		return fmt.Errorf("failed to write header: %w", err)
	}

	// Encrypt data in blocks
	buf := make([]byte, blockSize)
	nonce := make([]byte, nonceSize)
	for {
		// ReadFull yields io.ErrUnexpectedEOF for a final partial block; both
		// that and io.EOF end the loop after the partial block is flushed.
		n, readErr := io.ReadFull(r, buf)
		if n > 0 {
			// Generate unique nonce for this block
			if _, err := rand.Read(nonce); err != nil {
				return fmt.Errorf("failed to generate nonce: %w", err)
			}

			// Encrypt: ciphertext includes the Poly1305 auth tag (16 bytes)
			ciphertext := aead.Seal(nil, nonce, buf[:n], nil)

			// Write [nonce(12)][length(4)][ciphertext(n+16)]
			if _, err := w.Write(nonce); err != nil {
				return fmt.Errorf("failed to write nonce: %w", err)
			}
			lenBuf := make([]byte, lengthSize)
			binary.LittleEndian.PutUint32(lenBuf, uint32(len(ciphertext)))
			if _, err := w.Write(lenBuf); err != nil {
				return fmt.Errorf("failed to write length: %w", err)
			}
			if _, err := w.Write(ciphertext); err != nil {
				return fmt.Errorf("failed to write ciphertext: %w", err)
			}
		}
		if readErr != nil {
			if readErr == io.EOF || readErr == io.ErrUnexpectedEOF {
				break
			}
			return fmt.Errorf("failed to read input: %w", readErr)
		}
	}

	// Write EOF marker: [nonce(12)][length=0(4)]
	if _, err := rand.Read(nonce); err != nil {
		return fmt.Errorf("failed to generate EOF nonce: %w", err)
	}
	if _, err := w.Write(nonce); err != nil {
		return fmt.Errorf("failed to write EOF nonce: %w", err)
	}
	eofLen := make([]byte, lengthSize)
	// length is already zero (zero-value)
	if _, err := w.Write(eofLen); err != nil {
		return fmt.Errorf("failed to write EOF length: %w", err)
	}
	return nil
}
// StreamDecrypt reads STIM v2 chunked AEAD encrypted data from r and writes
// the decrypted plaintext to w. Returns an error if the header is invalid,
// the password is wrong, or data has been tampered with.
func StreamDecrypt(r io.Reader, w io.Writer, password string) error {
	// Read header
	header := make([]byte, headerSize)
	if _, err := io.ReadFull(r, header); err != nil {
		return fmt.Errorf("failed to read header: %w", err)
	}

	// Validate magic
	if header[0] != stimMagic[0] || header[1] != stimMagic[1] ||
		header[2] != stimMagic[2] || header[3] != stimMagic[3] {
		return ErrInvalidMagic
	}

	// Validate version
	if header[4] != 2 {
		return fmt.Errorf("%w: got %d", ErrUnsupportedVersion, header[4])
	}

	// Extract salt and params
	salt := header[5:21]
	params := borgtrix.DecodeArgon2Params(header[21:33])

	// Derive key using stored params so decryption matches whatever Argon2
	// cost settings the encryptor recorded in the header.
	key := deriveKeyWithParams(password, salt, params)

	// Create AEAD cipher
	aead, err := chacha20poly1305.New(key)
	if err != nil {
		return fmt.Errorf("failed to create AEAD: %w", err)
	}

	// Decrypt blocks: each frame is [nonce(12)][length(4)][ciphertext];
	// a frame with length 0 is the EOF marker.
	nonce := make([]byte, nonceSize)
	lenBuf := make([]byte, lengthSize)
	for {
		// Read nonce
		if _, err := io.ReadFull(r, nonce); err != nil {
			return fmt.Errorf("failed to read block nonce: %w", err)
		}

		// Read length
		if _, err := io.ReadFull(r, lenBuf); err != nil {
			return fmt.Errorf("failed to read block length: %w", err)
		}
		ctLen := binary.LittleEndian.Uint32(lenBuf)

		// EOF marker: length == 0
		if ctLen == 0 {
			return nil
		}

		// Read ciphertext
		ciphertext := make([]byte, ctLen)
		if _, err := io.ReadFull(r, ciphertext); err != nil {
			return fmt.Errorf("failed to read ciphertext: %w", err)
		}

		// Decrypt and authenticate; Open fails on a wrong key or tampering.
		plaintext, err := aead.Open(nil, nonce, ciphertext, nil)
		if err != nil {
			return fmt.Errorf("%w: %v", ErrStreamDecrypt, err)
		}
		if _, err := w.Write(plaintext); err != nil {
			return fmt.Errorf("failed to write plaintext: %w", err)
		}
	}
}
// deriveKeyWithParams derives a 32-byte key using Argon2id with specific
// parameters read from the STIM header (rather than using defaults).
// NOTE(review): params.Threads is narrowed to uint8 here; values > 255 would
// silently truncate — confirm the header encoder bounds this field.
func deriveKeyWithParams(password string, salt []byte, params borgtrix.Argon2Params) []byte {
	return argon2.IDKey([]byte(password), salt, params.Time, params.Memory, uint8(params.Threads), 32)
}

View file

@ -1,203 +0,0 @@
package tim
import (
"bytes"
"crypto/rand"
"io"
"testing"
)
// TestStreamRoundTrip_Good encrypts a short message, checks the STIM header
// bytes, and verifies decryption restores the original plaintext.
func TestStreamRoundTrip_Good(t *testing.T) {
	password := "test-password-123"
	plaintext := []byte("Hello, STIM v2 streaming encryption!")

	// Encrypt into an in-memory buffer.
	var sealed bytes.Buffer
	if err := StreamEncrypt(bytes.NewReader(plaintext), &sealed, password); err != nil {
		t.Fatalf("StreamEncrypt() error = %v", err)
	}

	// The output must begin with the "STIM" magic followed by version 2.
	out := sealed.Bytes()
	if len(out) < 5 {
		t.Fatal("encrypted output too short for header")
	}
	if string(out[:4]) != "STIM" {
		t.Errorf("expected magic 'STIM', got %q", string(out[:4]))
	}
	if out[4] != 2 {
		t.Errorf("expected version 2, got %d", out[4])
	}

	// Decrypt and compare against the original.
	var opened bytes.Buffer
	if err := StreamDecrypt(bytes.NewReader(out), &opened, password); err != nil {
		t.Fatalf("StreamDecrypt() error = %v", err)
	}
	if !bytes.Equal(opened.Bytes(), plaintext) {
		t.Errorf("round-trip mismatch:\n got: %q\n want: %q", opened.Bytes(), plaintext)
	}
}
// TestStreamRoundTrip_Large_Good round-trips a payload large enough to span
// several 1 MiB encryption blocks.
func TestStreamRoundTrip_Large_Good(t *testing.T) {
	// 3 MiB of pseudo-random data spans multiple 1 MiB blocks.
	payload := make([]byte, 3*1024*1024)
	if _, err := rand.Read(payload); err != nil {
		t.Fatalf("failed to generate random data: %v", err)
	}
	password := "large-data-password"

	var sealed bytes.Buffer
	if err := StreamEncrypt(bytes.NewReader(payload), &sealed, password); err != nil {
		t.Fatalf("StreamEncrypt() error = %v", err)
	}

	var opened bytes.Buffer
	if err := StreamDecrypt(bytes.NewReader(sealed.Bytes()), &opened, password); err != nil {
		t.Fatalf("StreamDecrypt() error = %v", err)
	}
	if !bytes.Equal(opened.Bytes(), payload) {
		t.Errorf("round-trip mismatch: got %d bytes, want %d bytes", opened.Len(), len(payload))
	}
}
// TestStreamEncrypt_Empty_Good round-trips zero-length input: the stream must
// still carry a valid header and EOF marker and decrypt to nothing.
func TestStreamEncrypt_Empty_Good(t *testing.T) {
	const password = "empty-test"

	var sealed bytes.Buffer
	if err := StreamEncrypt(bytes.NewReader(nil), &sealed, password); err != nil {
		t.Fatalf("StreamEncrypt() error = %v", err)
	}

	var opened bytes.Buffer
	if err := StreamDecrypt(bytes.NewReader(sealed.Bytes()), &opened, password); err != nil {
		t.Fatalf("StreamDecrypt() error = %v", err)
	}
	if opened.Len() != 0 {
		t.Errorf("expected empty output, got %d bytes", opened.Len())
	}
}
// TestStreamDecrypt_WrongPassword_Bad verifies that decryption with a
// mismatched password is rejected rather than producing garbage output.
func TestStreamDecrypt_WrongPassword_Bad(t *testing.T) {
	secret := []byte("secret data that should not decrypt with wrong key")

	var sealed bytes.Buffer
	if err := StreamEncrypt(bytes.NewReader(secret), &sealed, "correct-password"); err != nil {
		t.Fatalf("StreamEncrypt() error = %v", err)
	}

	var opened bytes.Buffer
	if StreamDecrypt(bytes.NewReader(sealed.Bytes()), &opened, "wrong-password") == nil {
		t.Fatal("expected error when decrypting with wrong password, got nil")
	}
}
// TestStreamDecrypt_Truncated_Bad checks that decryption fails cleanly when
// the stream is cut short: once just past the 33-byte header, and once in the
// middle of the first block's ciphertext.
func TestStreamDecrypt_Truncated_Bad(t *testing.T) {
	plaintext := []byte("data that will be truncated after encryption")
	password := "truncation-test"

	// Encrypt
	var cipherBuf bytes.Buffer
	if err := StreamEncrypt(bytes.NewReader(plaintext), &cipherBuf, password); err != nil {
		t.Fatalf("StreamEncrypt() error = %v", err)
	}
	encrypted := cipherBuf.Bytes()

	// Truncate to just past the header (33 bytes) but before the full first block
	if len(encrypted) > 40 {
		truncated := encrypted[:40]
		var plainBuf bytes.Buffer
		err := StreamDecrypt(bytes.NewReader(truncated), &plainBuf, password)
		if err == nil {
			t.Fatal("expected error when decrypting truncated data, got nil")
		}
	}

	// Truncate mid-way through the ciphertext
	if len(encrypted) > headerSize+nonceSize+lengthSize+5 {
		// 5 bytes into the first block's ciphertext, past the nonce + length.
		midpoint := headerSize + nonceSize + lengthSize + 5
		truncated := encrypted[:midpoint]
		var plainBuf bytes.Buffer
		err := StreamDecrypt(bytes.NewReader(truncated), &plainBuf, password)
		if err == nil {
			t.Fatal("expected error when decrypting mid-block truncated data, got nil")
		}
	}
}
// TestStreamDecrypt_InvalidMagic_Bad verifies that a header carrying the
// wrong magic bytes is rejected.
func TestStreamDecrypt_InvalidMagic_Bad(t *testing.T) {
	// Wrong magic, padded out to a full header's worth of bytes.
	data := append([]byte("NOPE\x02"), make([]byte, 28)...)
	var plainBuf bytes.Buffer
	if err := StreamDecrypt(bytes.NewReader(data), &plainBuf, "password"); err == nil {
		t.Fatal("expected error for invalid magic, got nil")
	}
}
// TestStreamDecrypt_InvalidVersion_Bad verifies that a header with an
// unsupported format version is rejected.
func TestStreamDecrypt_InvalidVersion_Bad(t *testing.T) {
	// Correct magic but version 1, padded out to a full header.
	data := append([]byte("STIM\x01"), make([]byte, 28)...)
	var plainBuf bytes.Buffer
	if err := StreamDecrypt(bytes.NewReader(data), &plainBuf, "password"); err == nil {
		t.Fatal("expected error for unsupported version, got nil")
	}
}
// TestStreamDecrypt_ShortHeader_Bad verifies that input shorter than a full
// header is rejected.
func TestStreamDecrypt_ShortHeader_Bad(t *testing.T) {
	var plainBuf bytes.Buffer
	// Only magic plus version byte — far too short for a complete header.
	if err := StreamDecrypt(bytes.NewReader([]byte("STIM\x02")), &plainBuf, "password"); err == nil {
		t.Fatal("expected error for short header, got nil")
	}
}
// TestStreamEncrypt_WriterError_Bad verifies that a failing destination
// writer surfaces as an error from StreamEncrypt.
func TestStreamEncrypt_WriterError_Bad(t *testing.T) {
	// The sink rejects everything after its first 5 bytes.
	sink := &limitedWriter{limit: 5}
	if err := StreamEncrypt(bytes.NewReader([]byte("test data")), sink, "password"); err == nil {
		t.Fatal("expected error when writer fails, got nil")
	}
}
// limitedWriter fails after writing limit bytes.
type limitedWriter struct {
limit int
written int
}
func (w *limitedWriter) Write(p []byte) (int, error) {
remaining := w.limit - w.written
if remaining <= 0 {
return 0, io.ErrShortWrite
}
if len(p) > remaining {
w.written += remaining
return remaining, io.ErrShortWrite
}
w.written += len(p)
return len(p), nil
}

View file

@ -11,10 +11,10 @@ import (
"io/fs"
"strings"
"forge.lthn.ai/Snider/Borg/pkg/datanode"
borgtrix "forge.lthn.ai/Snider/Borg/pkg/trix"
"forge.lthn.ai/Snider/Enchantrix/pkg/enchantrix"
"forge.lthn.ai/Snider/Enchantrix/pkg/trix"
"github.com/Snider/Borg/pkg/datanode"
borgtrix "github.com/Snider/Borg/pkg/trix"
"github.com/Snider/Enchantrix/pkg/enchantrix"
"github.com/Snider/Enchantrix/pkg/trix"
)
var (

View file

@ -4,7 +4,7 @@ import (
"os"
"testing"
"forge.lthn.ai/Snider/Borg/pkg/datanode"
"github.com/Snider/Borg/pkg/datanode"
)
func TestMain(m *testing.M) {

View file

@ -5,7 +5,7 @@ import (
"errors"
"testing"
"forge.lthn.ai/Snider/Borg/pkg/datanode"
"github.com/Snider/Borg/pkg/datanode"
)
func TestNew(t *testing.T) {

View file

@ -2,16 +2,13 @@ package trix
import (
"crypto/sha256"
"encoding/binary"
"errors"
"fmt"
"golang.org/x/crypto/argon2"
"forge.lthn.ai/Snider/Borg/pkg/datanode"
"forge.lthn.ai/Snider/Enchantrix/pkg/crypt"
"forge.lthn.ai/Snider/Enchantrix/pkg/enchantrix"
"forge.lthn.ai/Snider/Enchantrix/pkg/trix"
"github.com/Snider/Borg/pkg/datanode"
"github.com/Snider/Enchantrix/pkg/crypt"
"github.com/Snider/Enchantrix/pkg/enchantrix"
"github.com/Snider/Enchantrix/pkg/trix"
)
var (
@ -64,53 +61,11 @@ func FromTrix(data []byte, password string) (*datanode.DataNode, error) {
// DeriveKey derives a 32-byte key from a password using SHA-256.
// This is used for ChaCha20-Poly1305 encryption which requires a 32-byte key.
// Deprecated: Use DeriveKeyArgon2 for new code; this remains for backward compatibility.
func DeriveKey(password string) []byte {
	// Incremental form of sha256.Sum256; produces the identical 32-byte digest.
	h := sha256.New()
	h.Write([]byte(password)) // hash.Hash.Write never returns an error
	return h.Sum(nil)
}
// Argon2Params holds the tunable parameters for Argon2id key derivation.
// Instances are serialised via Encode/DecodeArgon2Params as 3 x uint32.
type Argon2Params struct {
	Time uint32 // number of passes (iterations)
	Memory uint32 // in KiB
	Threads uint32 // parallelism; converted to uint8 when passed to argon2.IDKey, so values above 255 truncate
}
// DefaultArgon2Params returns sensible default parameters for Argon2id:
// 3 passes over 64 MiB of memory with 4 lanes.
func DefaultArgon2Params() Argon2Params {
	var p Argon2Params
	p.Time = 3
	p.Memory = 64 * 1024 // KiB, i.e. 64 MiB
	p.Threads = 4
	return p
}
// Encode serialises the Argon2Params as 12 bytes (3 x uint32 little-endian),
// in field order: Time, Memory, Threads.
func (p Argon2Params) Encode() []byte {
	out := make([]byte, 12)
	for i, v := range [3]uint32{p.Time, p.Memory, p.Threads} {
		binary.LittleEndian.PutUint32(out[4*i:4*i+4], v)
	}
	return out
}
// DecodeArgon2Params reads 12 bytes (3 x uint32 little-endian) into
// Argon2Params, the inverse of Encode. The caller must supply at least 12
// bytes; shorter input panics on the out-of-range slice access.
func DecodeArgon2Params(data []byte) Argon2Params {
	var p Argon2Params
	p.Time = binary.LittleEndian.Uint32(data[0:4])
	p.Memory = binary.LittleEndian.Uint32(data[4:8])
	p.Threads = binary.LittleEndian.Uint32(data[8:12])
	return p
}
// DeriveKeyArgon2 derives a 32-byte key from a password and salt using Argon2id
// with DefaultArgon2Params. This is the recommended key derivation for new code.
func DeriveKeyArgon2(password string, salt []byte) []byte {
	p := DefaultArgon2Params()
	// argon2.IDKey takes the thread count as uint8; Threads values above 255
	// would be silently truncated by this conversion.
	return argon2.IDKey([]byte(password), salt, p.Time, p.Memory, uint8(p.Threads), 32)
}
// ToTrixChaCha converts a DataNode to encrypted Trix format using ChaCha20-Poly1305.
func ToTrixChaCha(dn *datanode.DataNode, password string) ([]byte, error) {
if password == "" {

View file

@ -1,11 +1,9 @@
package trix
import (
"bytes"
"crypto/rand"
"testing"
"forge.lthn.ai/Snider/Borg/pkg/datanode"
"github.com/Snider/Borg/pkg/datanode"
)
func TestDeriveKey(t *testing.T) {
@ -238,85 +236,3 @@ func TestToTrixChaChaWithLargeData(t *testing.T) {
t.Fatalf("Failed to open large.bin: %v", err)
}
}
// --- Argon2id key derivation tests ---
// TestDeriveKeyArgon2_Good checks that Argon2id derivation with a random
// salt yields a 32-byte key.
func TestDeriveKeyArgon2_Good(t *testing.T) {
	salt := make([]byte, 16)
	if _, err := rand.Read(salt); err != nil {
		t.Fatalf("failed to generate salt: %v", err)
	}
	if key := DeriveKeyArgon2("test-password", salt); len(key) != 32 {
		t.Fatalf("expected 32-byte key, got %d bytes", len(key))
	}
}
// TestDeriveKeyArgon2_Deterministic_Good checks that identical password and
// salt inputs always derive the same key.
func TestDeriveKeyArgon2_Deterministic_Good(t *testing.T) {
	salt := []byte("fixed-salt-value")
	first := DeriveKeyArgon2("same-password", salt)
	second := DeriveKeyArgon2("same-password", salt)
	if !bytes.Equal(first, second) {
		t.Fatal("same password and salt must produce the same key")
	}
}
// TestDeriveKeyArgon2_DifferentSalt_Good checks that changing only the salt
// changes the derived key.
func TestDeriveKeyArgon2_DifferentSalt_Good(t *testing.T) {
	const password = "same-password"
	keyA := DeriveKeyArgon2(password, []byte("salt-one-value!!"))
	keyB := DeriveKeyArgon2(password, []byte("salt-two-value!!"))
	if bytes.Equal(keyA, keyB) {
		t.Fatal("different salts must produce different keys")
	}
}
// TestDeriveKeyLegacy_Good checks the deprecated SHA-256 derivation: output
// is 32 bytes and deterministic for the same password.
func TestDeriveKeyLegacy_Good(t *testing.T) {
	first := DeriveKey("backward-compat")
	if len(first) != 32 {
		t.Fatalf("expected 32-byte key, got %d bytes", len(first))
	}
	if second := DeriveKey("backward-compat"); !bytes.Equal(first, second) {
		t.Fatal("legacy DeriveKey must be deterministic")
	}
}
// TestArgon2Params_Good checks that default Argon2 parameters are non-zero,
// encode to exactly 12 bytes, and survive an encode/decode round trip.
func TestArgon2Params_Good(t *testing.T) {
	params := DefaultArgon2Params()

	// Every default must be non-zero to be meaningful for Argon2id.
	for _, f := range []struct {
		name  string
		value uint32
	}{
		{"Time", params.Time},
		{"Memory", params.Memory},
		{"Threads", params.Threads},
	} {
		if f.value == 0 {
			t.Fatalf("%s must be non-zero", f.name)
		}
	}

	// Encode produces 12 bytes (3 x uint32 LE).
	encoded := params.Encode()
	if len(encoded) != 12 {
		t.Fatalf("expected 12-byte encoding, got %d bytes", len(encoded))
	}

	// Round-trip: Decode must recover the original values.
	decoded := DecodeArgon2Params(encoded)
	if decoded.Time != params.Time {
		t.Fatalf("Time mismatch: got %d, want %d", decoded.Time, params.Time)
	}
	if decoded.Memory != params.Memory {
		t.Fatalf("Memory mismatch: got %d, want %d", decoded.Memory, params.Memory)
	}
	if decoded.Threads != params.Threads {
		t.Fatalf("Threads mismatch: got %d, want %d", decoded.Threads, params.Threads)
	}
}

View file

@ -1,93 +0,0 @@
package ui
import (
"fmt"
"io"
"os"
"github.com/mattn/go-isatty"
)
// Progress abstracts output for both interactive and scripted use.
type Progress interface {
	// Start announces the beginning of a labelled operation.
	Start(label string)
	// Update reports progress toward total; the implementations in this
	// package write nothing when total is not positive.
	Update(current, total int64)
	// Finish announces completion of a labelled operation.
	Finish(label string)
	// Log writes a message at the given level; args are consumed as
	// key/value pairs, two at a time.
	Log(level, msg string, args ...any)
}
// QuietProgress writes structured log lines. For cron, pipes, --quiet.
type QuietProgress struct {
w io.Writer
}
func NewQuietProgress(w io.Writer) *QuietProgress {
return &QuietProgress{w: w}
}
func (q *QuietProgress) Start(label string) {
fmt.Fprintf(q.w, "[START] %s\n", label)
}
func (q *QuietProgress) Update(current, total int64) {
if total > 0 {
fmt.Fprintf(q.w, "[PROGRESS] %d/%d\n", current, total)
}
}
func (q *QuietProgress) Finish(label string) {
fmt.Fprintf(q.w, "[DONE] %s\n", label)
}
func (q *QuietProgress) Log(level, msg string, args ...any) {
fmt.Fprintf(q.w, "[%s] %s", level, msg)
for i := 0; i+1 < len(args); i += 2 {
fmt.Fprintf(q.w, " %v=%v", args[i], args[i+1])
}
fmt.Fprintln(q.w)
}
// InteractiveProgress uses simple terminal output for TTY sessions.
type InteractiveProgress struct {
w io.Writer
}
func NewInteractiveProgress(w io.Writer) *InteractiveProgress {
return &InteractiveProgress{w: w}
}
func (p *InteractiveProgress) Start(label string) {
fmt.Fprintf(p.w, "→ %s\n", label)
}
func (p *InteractiveProgress) Update(current, total int64) {
if total > 0 {
pct := current * 100 / total
fmt.Fprintf(p.w, "\r %d%%", pct)
}
}
func (p *InteractiveProgress) Finish(label string) {
fmt.Fprintf(p.w, "\r✓ %s\n", label)
}
func (p *InteractiveProgress) Log(level, msg string, args ...any) {
fmt.Fprintf(p.w, " %s", msg)
for i := 0; i+1 < len(args); i += 2 {
fmt.Fprintf(p.w, " %v=%v", args[i], args[i+1])
}
fmt.Fprintln(p.w)
}
// IsTTY returns true if the given file descriptor is a terminal.
func IsTTY(fd uintptr) bool {
	if isatty.IsTerminal(fd) {
		return true
	}
	return isatty.IsCygwinTerminal(fd)
}
// DefaultProgress returns InteractiveProgress for TTYs, QuietProgress otherwise.
func DefaultProgress() Progress {
	stdout := os.Stdout
	if !IsTTY(stdout.Fd()) {
		return NewQuietProgress(stdout)
	}
	return NewInteractiveProgress(stdout)
}

View file

@ -1,63 +0,0 @@
package ui
import (
"bytes"
"strings"
"testing"
)
// TestQuietProgress_Log_Good checks that Log output includes the message text.
func TestQuietProgress_Log_Good(t *testing.T) {
	var buf bytes.Buffer
	NewQuietProgress(&buf).Log("info", "test message", "key", "val")
	if out := buf.String(); !strings.Contains(out, "test message") {
		t.Fatalf("expected log output to contain 'test message', got: %s", out)
	}
}
// TestQuietProgress_StartFinish_Good checks that the Start and Finish labels
// both appear in the output.
func TestQuietProgress_StartFinish_Good(t *testing.T) {
	var buf bytes.Buffer
	p := NewQuietProgress(&buf)
	p.Start("collecting")
	p.Update(50, 100)
	p.Finish("done")
	out := buf.String()
	for _, want := range []string{"collecting", "done"} {
		if !strings.Contains(out, want) {
			t.Fatalf("expected '%s' in output, got: %s", want, out)
		}
	}
}
// TestQuietProgress_Update_Ugly checks that Update tolerates a zero total.
func TestQuietProgress_Update_Ugly(t *testing.T) {
	var buf bytes.Buffer
	p := NewQuietProgress(&buf)
	// Should not panic with zero total.
	for _, current := range []int64{0, 5} {
		p.Update(current, 0)
	}
}
// TestInteractiveProgress_StartFinish_Good checks that the Start and Finish
// labels both appear in the output.
func TestInteractiveProgress_StartFinish_Good(t *testing.T) {
	var buf bytes.Buffer
	p := NewInteractiveProgress(&buf)
	p.Start("collecting")
	p.Finish("done")
	out := buf.String()
	for _, want := range []string{"collecting", "done"} {
		if !strings.Contains(out, want) {
			t.Fatalf("expected '%s', got: %s", want, out)
		}
	}
}
// TestInteractiveProgress_Update_Good checks that a 50/100 update renders "50%".
func TestInteractiveProgress_Update_Good(t *testing.T) {
	var buf bytes.Buffer
	NewInteractiveProgress(&buf).Update(50, 100)
	if got := buf.String(); !strings.Contains(got, "50%") {
		t.Fatalf("expected '50%%', got: %s", got)
	}
}

View file

@ -5,7 +5,7 @@ import (
"os"
"path/filepath"
"forge.lthn.ai/Snider/Borg/pkg/datanode"
"github.com/Snider/Borg/pkg/datanode"
"github.com/go-git/go-git/v5"
)

View file

@ -11,9 +11,9 @@ import (
"encoding/json"
"syscall/js"
"forge.lthn.ai/Snider/Borg/pkg/smsg"
"forge.lthn.ai/Snider/Borg/pkg/stmf"
"forge.lthn.ai/Snider/Enchantrix/pkg/enchantrix"
"github.com/Snider/Borg/pkg/smsg"
"github.com/Snider/Borg/pkg/stmf"
"github.com/Snider/Enchantrix/pkg/enchantrix"
)
// Version of the WASM module

View file

@ -7,7 +7,7 @@ import (
"net/url"
"strings"
"forge.lthn.ai/Snider/Borg/pkg/datanode"
"github.com/Snider/Borg/pkg/datanode"
"github.com/schollz/progressbar/v3"
"golang.org/x/net/html"