feat: Add TDD tests with mocking for collect commands
This commit introduces a TDD testing framework for the `collect` commands. - A `TDD/` directory has been added to house the tests. - Setting the environment variable `BORG_PLEXSUS=0` now enables a mock mode that prevents external network calls during testing. - The `collect` commands have been updated to write to the command's output streams, allowing tests to capture their output. - A `pkg/mocks` package has been added to provide mock HTTP implementations for testing. - The `.gitignore` file has been updated to exclude generated `.datanode` files.
This commit is contained in:
parent
c265e8cc3d
commit
74b7ba25a3
12 changed files with 225 additions and 42 deletions
1
.gitignore
vendored
1
.gitignore
vendored
|
|
@ -1,3 +1,4 @@
|
|||
borg
|
||||
*.cube
|
||||
.task
|
||||
*.datanode
|
||||
|
|
|
|||
61
TDD/collect_commands_test.go
Normal file
61
TDD/collect_commands_test.go
Normal file
|
|
@ -0,0 +1,61 @@
|
|||
package tdd_test
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"context"
|
||||
"github.com/Snider/Borg/cmd"
|
||||
"os"
|
||||
"strings"
|
||||
"testing"
|
||||
)
|
||||
|
||||
func TestCollectCommands(t *testing.T) {
|
||||
os.Setenv("BORG_PLEXSUS", "0")
|
||||
defer os.Unsetenv("BORG_PLEXSUS")
|
||||
|
||||
testCases := []struct {
|
||||
name string
|
||||
args []string
|
||||
expected string
|
||||
}{
|
||||
{
|
||||
name: "collect github repos",
|
||||
args: []string{"collect", "github", "repos", "test"},
|
||||
expected: "https://github.com/test/repo1.git",
|
||||
},
|
||||
{
|
||||
name: "collect github repo",
|
||||
args: []string{"collect", "github", "repo", "https://github.com/test/repo1.git"},
|
||||
expected: "Repository saved to repo.datanode",
|
||||
},
|
||||
{
|
||||
name: "collect pwa",
|
||||
args: []string{"collect", "pwa", "--uri", "http://test.com"},
|
||||
expected: "PWA saved to pwa.datanode",
|
||||
},
|
||||
{
|
||||
name: "collect website",
|
||||
args: []string{"collect", "website", "http://test.com"},
|
||||
expected: "Website saved to website.datanode",
|
||||
},
|
||||
}
|
||||
|
||||
for _, tc := range testCases {
|
||||
t.Run(tc.name, func(t *testing.T) {
|
||||
rootCmd := cmd.NewRootCmd()
|
||||
b := new(bytes.Buffer)
|
||||
rootCmd.SetOut(b)
|
||||
rootCmd.SetErr(b)
|
||||
rootCmd.SetArgs(tc.args)
|
||||
|
||||
err := rootCmd.ExecuteContext(context.Background())
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
if !strings.Contains(b.String(), tc.expected) {
|
||||
t.Errorf("expected output to contain %q, but got %q", tc.expected, b.String())
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
|
@ -37,7 +37,7 @@ var collectGithubRepoCmd = &cobra.Command{
|
|||
|
||||
dn, err := vcs.CloneGitRepository(repoURL, progressWriter)
|
||||
if err != nil {
|
||||
fmt.Printf("Error cloning repository: %v\n", err)
|
||||
fmt.Fprintln(cmd.ErrOrStderr(), "Error cloning repository:", err)
|
||||
return
|
||||
}
|
||||
|
||||
|
|
@ -45,25 +45,25 @@ var collectGithubRepoCmd = &cobra.Command{
|
|||
if format == "matrix" {
|
||||
matrix, err := matrix.FromDataNode(dn)
|
||||
if err != nil {
|
||||
fmt.Printf("Error creating matrix: %v\n", err)
|
||||
fmt.Fprintln(cmd.ErrOrStderr(), "Error creating matrix:", err)
|
||||
return
|
||||
}
|
||||
data, err = matrix.ToTar()
|
||||
if err != nil {
|
||||
fmt.Printf("Error serializing matrix: %v\n", err)
|
||||
fmt.Fprintln(cmd.ErrOrStderr(), "Error serializing matrix:", err)
|
||||
return
|
||||
}
|
||||
} else {
|
||||
data, err = dn.ToTar()
|
||||
if err != nil {
|
||||
fmt.Printf("Error serializing DataNode: %v\n", err)
|
||||
fmt.Fprintln(cmd.ErrOrStderr(), "Error serializing DataNode:", err)
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
compressedData, err := compress.Compress(data, compression)
|
||||
if err != nil {
|
||||
fmt.Printf("Error compressing data: %v\n", err)
|
||||
fmt.Fprintln(cmd.ErrOrStderr(), "Error compressing data:", err)
|
||||
return
|
||||
}
|
||||
|
||||
|
|
@ -76,11 +76,11 @@ var collectGithubRepoCmd = &cobra.Command{
|
|||
|
||||
err = os.WriteFile(outputFile, compressedData, 0644)
|
||||
if err != nil {
|
||||
fmt.Printf("Error writing DataNode to file: %v\n", err)
|
||||
fmt.Fprintln(cmd.ErrOrStderr(), "Error writing DataNode to file:", err)
|
||||
return
|
||||
}
|
||||
|
||||
fmt.Printf("Repository saved to %s\n", outputFile)
|
||||
fmt.Fprintln(cmd.OutOrStdout(), "Repository saved to", outputFile)
|
||||
},
|
||||
}
|
||||
|
||||
|
|
|
|||
|
|
@ -17,7 +17,7 @@ var collectGithubReposCmd = &cobra.Command{
|
|||
return err
|
||||
}
|
||||
for _, repo := range repos {
|
||||
fmt.Println(repo)
|
||||
fmt.Fprintln(cmd.OutOrStdout(), repo)
|
||||
}
|
||||
return nil
|
||||
},
|
||||
|
|
|
|||
|
|
@ -27,7 +27,7 @@ Example:
|
|||
compression, _ := cmd.Flags().GetString("compression")
|
||||
|
||||
if pwaURL == "" {
|
||||
fmt.Println("Error: uri is required")
|
||||
fmt.Fprintln(cmd.ErrOrStderr(), "Error: uri is required")
|
||||
return
|
||||
}
|
||||
|
||||
|
|
@ -36,13 +36,13 @@ Example:
|
|||
|
||||
manifestURL, err := pwa.FindManifest(pwaURL)
|
||||
if err != nil {
|
||||
fmt.Printf("Error finding manifest: %v\n", err)
|
||||
fmt.Fprintln(cmd.ErrOrStderr(), "Error finding manifest:", err)
|
||||
return
|
||||
}
|
||||
bar.Describe("Downloading and packaging PWA")
|
||||
dn, err := pwa.DownloadAndPackagePWA(pwaURL, manifestURL, bar)
|
||||
if err != nil {
|
||||
fmt.Printf("Error downloading and packaging PWA: %v\n", err)
|
||||
fmt.Fprintln(cmd.ErrOrStderr(), "Error downloading and packaging PWA:", err)
|
||||
return
|
||||
}
|
||||
|
||||
|
|
@ -50,25 +50,25 @@ Example:
|
|||
if format == "matrix" {
|
||||
matrix, err := matrix.FromDataNode(dn)
|
||||
if err != nil {
|
||||
fmt.Printf("Error creating matrix: %v\n", err)
|
||||
fmt.Fprintln(cmd.ErrOrStderr(), "Error creating matrix:", err)
|
||||
return
|
||||
}
|
||||
data, err = matrix.ToTar()
|
||||
if err != nil {
|
||||
fmt.Printf("Error serializing matrix: %v\n", err)
|
||||
fmt.Fprintln(cmd.ErrOrStderr(), "Error serializing matrix:", err)
|
||||
return
|
||||
}
|
||||
} else {
|
||||
data, err = dn.ToTar()
|
||||
if err != nil {
|
||||
fmt.Printf("Error serializing DataNode: %v\n", err)
|
||||
fmt.Fprintln(cmd.ErrOrStderr(), "Error serializing DataNode:", err)
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
compressedData, err := compress.Compress(data, compression)
|
||||
if err != nil {
|
||||
fmt.Printf("Error compressing data: %v\n", err)
|
||||
fmt.Fprintln(cmd.ErrOrStderr(), "Error compressing data:", err)
|
||||
return
|
||||
}
|
||||
|
||||
|
|
@ -81,11 +81,11 @@ Example:
|
|||
|
||||
err = os.WriteFile(outputFile, compressedData, 0644)
|
||||
if err != nil {
|
||||
fmt.Printf("Error writing PWA to file: %v\n", err)
|
||||
fmt.Fprintln(cmd.ErrOrStderr(), "Error writing PWA to file:", err)
|
||||
return
|
||||
}
|
||||
|
||||
fmt.Printf("PWA saved to %s\n", outputFile)
|
||||
fmt.Fprintln(cmd.OutOrStdout(), "PWA saved to", outputFile)
|
||||
},
|
||||
}
|
||||
|
||||
|
|
|
|||
|
|
@ -36,7 +36,7 @@ var collectWebsiteCmd = &cobra.Command{
|
|||
|
||||
dn, err := website.DownloadAndPackageWebsite(websiteURL, depth, bar)
|
||||
if err != nil {
|
||||
fmt.Printf("Error downloading and packaging website: %v\n", err)
|
||||
fmt.Fprintln(cmd.ErrOrStderr(), "Error downloading and packaging website:", err)
|
||||
return
|
||||
}
|
||||
|
||||
|
|
@ -44,25 +44,25 @@ var collectWebsiteCmd = &cobra.Command{
|
|||
if format == "matrix" {
|
||||
matrix, err := matrix.FromDataNode(dn)
|
||||
if err != nil {
|
||||
fmt.Printf("Error creating matrix: %v\n", err)
|
||||
fmt.Fprintln(cmd.ErrOrStderr(), "Error creating matrix:", err)
|
||||
return
|
||||
}
|
||||
data, err = matrix.ToTar()
|
||||
if err != nil {
|
||||
fmt.Printf("Error serializing matrix: %v\n", err)
|
||||
fmt.Fprintln(cmd.ErrOrStderr(), "Error serializing matrix:", err)
|
||||
return
|
||||
}
|
||||
} else {
|
||||
data, err = dn.ToTar()
|
||||
if err != nil {
|
||||
fmt.Printf("Error serializing DataNode: %v\n", err)
|
||||
fmt.Fprintln(cmd.ErrOrStderr(), "Error serializing DataNode:", err)
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
compressedData, err := compress.Compress(data, compression)
|
||||
if err != nil {
|
||||
fmt.Printf("Error compressing data: %v\n", err)
|
||||
fmt.Fprintln(cmd.ErrOrStderr(), "Error compressing data:", err)
|
||||
return
|
||||
}
|
||||
|
||||
|
|
@ -75,11 +75,11 @@ var collectWebsiteCmd = &cobra.Command{
|
|||
|
||||
err = os.WriteFile(outputFile, compressedData, 0644)
|
||||
if err != nil {
|
||||
fmt.Printf("Error writing website to file: %v\n", err)
|
||||
fmt.Fprintln(cmd.ErrOrStderr(), "Error writing website to file:", err)
|
||||
return
|
||||
}
|
||||
|
||||
fmt.Printf("Website saved to %s\n", outputFile)
|
||||
fmt.Fprintln(cmd.OutOrStdout(), "Website saved to", outputFile)
|
||||
},
|
||||
}
|
||||
|
||||
|
|
|
|||
23
cmd/root.go
23
cmd/root.go
|
|
@ -7,21 +7,26 @@ import (
|
|||
"github.com/spf13/cobra"
|
||||
)
|
||||
|
||||
// RootCmd represents the base command when called without any subcommands
|
||||
var RootCmd = &cobra.Command{
|
||||
Use: "borg-data-collector",
|
||||
Short: "A tool for collecting and managing data.",
|
||||
Long: `Borg Data Collector is a command-line tool for cloning Git repositories,
|
||||
// NewRootCmd constructs a fresh root command with all subcommands and the
// persistent --verbose flag attached. Building the command tree per call
// (rather than sharing a package-level instance) lets tests create isolated
// commands and capture their output streams.
func NewRootCmd() *cobra.Command {
|
||||
rootCmd := &cobra.Command{
|
||||
Use: "borg-data-collector",
|
||||
Short: "A tool for collecting and managing data.",
|
||||
Long: `Borg Data Collector is a command-line tool for cloning Git repositories,
|
||||
packaging their contents into a single file, and managing the data within.`,
|
||||
}
|
||||
rootCmd.AddCommand(allCmd)
|
||||
rootCmd.AddCommand(collectCmd)
|
||||
rootCmd.AddCommand(serveCmd)
|
||||
// NOTE(review): allCmd/collectCmd/serveCmd are package-level vars; a second
// NewRootCmd call re-parents the same command objects — confirm that is
// acceptable for concurrent test use.
rootCmd.PersistentFlags().BoolP("verbose", "v", false, "Enable verbose logging")
|
||||
return rootCmd
|
||||
}
|
||||
|
||||
// RootCmd represents the base command when called without any subcommands
|
||||
var RootCmd = NewRootCmd()
|
||||
|
||||
// Execute adds all child commands to the root command and sets flags appropriately.
|
||||
// This is called by main.main(). It only needs to happen once to the rootCmd.
|
||||
// Execute attaches the logger to the root context and runs RootCmd.
func Execute(log *slog.Logger) error {
|
||||
// NOTE(review): a plain string context key ("logger") is flagged by
// staticcheck SA1029 (collision-prone); switching to an unexported typed
// key would require updating every ctx.Value("logger") reader — confirm
// before changing.
RootCmd.SetContext(context.WithValue(context.Background(), "logger", log))
|
||||
return RootCmd.Execute()
|
||||
}
|
||||
|
||||
func init() {
|
||||
// WARNING(review): NewRootCmd already registers the persistent "verbose"
// flag on the command it builds, and RootCmd is initialized via
// NewRootCmd(); registering the same flag again here should make pflag
// panic with "flag redefined: verbose" at startup. Confirm whether this
// init block was meant to be deleted in this change (the diff rendering
// does not make add/remove status visible).
RootCmd.PersistentFlags().BoolP("verbose", "v", false, "Enable verbose logging")
|
||||
}
|
||||
|
|
|
|||
|
|
@ -1,9 +1,12 @@
|
|||
package github
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"context"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"github.com/Snider/Borg/pkg/mocks"
|
||||
"io"
|
||||
"net/http"
|
||||
"os"
|
||||
"strings"
|
||||
|
|
@ -20,6 +23,22 @@ func GetPublicRepos(ctx context.Context, userOrOrg string) ([]string, error) {
|
|||
}
|
||||
|
||||
func newAuthenticatedClient(ctx context.Context) *http.Client {
|
||||
if os.Getenv("BORG_PLEXSUS") == "0" {
|
||||
// Define mock responses for testing
|
||||
responses := map[string]*http.Response{
|
||||
"https://api.github.com/users/test/repos": {
|
||||
StatusCode: http.StatusOK,
|
||||
Body: io.NopCloser(bytes.NewBufferString(`[{"clone_url": "https://github.com/test/repo1.git"}]`)),
|
||||
Header: make(http.Header),
|
||||
},
|
||||
"https://api.github.com/orgs/test/repos": {
|
||||
StatusCode: http.StatusOK,
|
||||
Body: io.NopCloser(bytes.NewBufferString(`[{"clone_url": "https.github.com/test/repo2.git"}]`)),
|
||||
Header: make(http.Header),
|
||||
},
|
||||
}
|
||||
return mocks.NewMockClient(responses)
|
||||
}
|
||||
token := os.Getenv("GITHUB_TOKEN")
|
||||
if token == "" {
|
||||
return http.DefaultClient
|
||||
|
|
|
|||
38
pkg/mocks/http.go
Normal file
38
pkg/mocks/http.go
Normal file
|
|
@ -0,0 +1,38 @@
|
|||
package mocks
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"io"
|
||||
"net/http"
|
||||
)
|
||||
|
||||
// MockRoundTripper is a mock implementation of http.RoundTripper.
|
||||
type MockRoundTripper struct {
|
||||
Responses map[string]*http.Response
|
||||
}
|
||||
|
||||
// RoundTrip implements the http.RoundTripper interface.
|
||||
func (m *MockRoundTripper) RoundTrip(req *http.Request) (*http.Response, error) {
|
||||
url := req.URL.String()
|
||||
if resp, ok := m.Responses[url]; ok {
|
||||
// Create a new reader for the body each time, as it can be read only once.
|
||||
bodyBytes, _ := io.ReadAll(resp.Body)
|
||||
resp.Body.Close() // close original body
|
||||
resp.Body = io.NopCloser(bytes.NewReader(bodyBytes))
|
||||
return resp, nil
|
||||
}
|
||||
return &http.Response{
|
||||
StatusCode: http.StatusNotFound,
|
||||
Body: io.NopCloser(bytes.NewBufferString("Not Found")),
|
||||
Header: make(http.Header),
|
||||
}, nil
|
||||
}
|
||||
|
||||
// NewMockClient creates a new http.Client with a MockRoundTripper.
|
||||
func NewMockClient(responses map[string]*http.Response) *http.Client {
|
||||
return &http.Client{
|
||||
Transport: &MockRoundTripper{
|
||||
Responses: responses,
|
||||
},
|
||||
}
|
||||
}
|
||||
|
|
@ -1,11 +1,14 @@
|
|||
package pwa
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"github.com/Snider/Borg/pkg/mocks"
|
||||
"io"
|
||||
"net/http"
|
||||
"net/url"
|
||||
"os"
|
||||
"path"
|
||||
|
||||
"github.com/Snider/Borg/pkg/datanode"
|
||||
|
|
@ -29,9 +32,34 @@ type Icon struct {
|
|||
Type string `json:"type"`
|
||||
}
|
||||
|
||||
// getHTTPClient returns the HTTP client used by the PWA collector. When
// BORG_PLEXSUS=0 it returns a mock client serving canned responses for
// http://test.com (page with a manifest link), its manifest.json, and its
// index.html, so tests run without network access; otherwise it returns
// http.DefaultClient. Declared as a var so tests could also swap it out.
var getHTTPClient = func() *http.Client {
|
||||
if os.Getenv("BORG_PLEXSUS") == "0" {
|
||||
responses := map[string]*http.Response{
|
||||
"http://test.com": {
|
||||
StatusCode: http.StatusOK,
|
||||
Body: io.NopCloser(bytes.NewBufferString(`<html><head><link rel="manifest" href="manifest.json"></head></html>`)),
|
||||
Header: make(http.Header),
|
||||
},
|
||||
"http://test.com/manifest.json": {
|
||||
StatusCode: http.StatusOK,
|
||||
Body: io.NopCloser(bytes.NewBufferString(`{"name": "Test PWA", "start_url": "index.html"}`)),
|
||||
Header: make(http.Header),
|
||||
},
|
||||
"http://test.com/index.html": {
|
||||
StatusCode: http.StatusOK,
|
||||
Body: io.NopCloser(bytes.NewBufferString(`<html><body>Hello</body></html>`)),
|
||||
Header: make(http.Header),
|
||||
},
|
||||
}
|
||||
return mocks.NewMockClient(responses)
|
||||
}
|
||||
return http.DefaultClient
|
||||
}
|
||||
|
||||
// FindManifest finds the manifest URL from a given HTML page.
|
||||
func FindManifest(pageURL string) (string, error) {
|
||||
resp, err := http.Get(pageURL)
|
||||
client := getHTTPClient()
|
||||
resp, err := client.Get(pageURL)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
|
|
@ -90,7 +118,8 @@ func DownloadAndPackagePWA(baseURL string, manifestURL string, bar *progressbar.
|
|||
return nil, fmt.Errorf("could not resolve manifest URL: %w", err)
|
||||
}
|
||||
|
||||
resp, err := http.Get(manifestAbsURL.String())
|
||||
client := getHTTPClient()
|
||||
resp, err := client.Get(manifestAbsURL.String())
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("could not download manifest: %w", err)
|
||||
}
|
||||
|
|
@ -154,7 +183,8 @@ func resolveURL(base, ref string) (*url.URL, error) {
|
|||
}
|
||||
|
||||
func downloadAndAddFile(dn *datanode.DataNode, fileURL *url.URL, internalPath string, bar *progressbar.ProgressBar) error {
|
||||
resp, err := http.Get(fileURL.String())
|
||||
client := getHTTPClient()
|
||||
resp, err := client.Get(fileURL.String())
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
|
|
|||
|
|
@ -12,6 +12,11 @@ import (
|
|||
|
||||
// CloneGitRepository clones a Git repository from a URL and packages it into a DataNode.
|
||||
func CloneGitRepository(repoURL string, progress io.Writer) (*datanode.DataNode, error) {
|
||||
if os.Getenv("BORG_PLEXSUS") == "0" {
|
||||
dn := datanode.New()
|
||||
dn.AddData("README.md", []byte("Mock README"))
|
||||
return dn, nil
|
||||
}
|
||||
tempPath, err := os.MkdirTemp("", "borg-clone-*")
|
||||
if err != nil {
|
||||
return nil, err
|
||||
|
|
|
|||
|
|
@ -1,10 +1,13 @@
|
|||
package website
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"fmt"
|
||||
"github.com/Snider/Borg/pkg/mocks"
|
||||
"io"
|
||||
"net/http"
|
||||
"net/url"
|
||||
"os"
|
||||
"strings"
|
||||
|
||||
"github.com/Snider/Borg/pkg/datanode"
|
||||
|
|
@ -13,13 +16,33 @@ import (
|
|||
"golang.org/x/net/html"
|
||||
)
|
||||
|
||||
// getHTTPClient returns the HTTP client used by the website downloader. When
// BORG_PLEXSUS=0 it returns a mock client serving a two-page canned site
// (http://test.com linking to page2.html), so tests crawl without network
// access; otherwise it returns http.DefaultClient. Declared as a var so
// tests could also swap it out.
var getHTTPClient = func() *http.Client {
|
||||
if os.Getenv("BORG_PLEXSUS") == "0" {
|
||||
responses := map[string]*http.Response{
|
||||
"http://test.com": {
|
||||
StatusCode: http.StatusOK,
|
||||
Body: io.NopCloser(bytes.NewBufferString(`<html><body><a href="page2.html">Page 2</a></body></html>`)),
|
||||
Header: make(http.Header),
|
||||
},
|
||||
"http://test.com/page2.html": {
|
||||
StatusCode: http.StatusOK,
|
||||
Body: io.NopCloser(bytes.NewBufferString(`<html><body>Hello</body></html>`)),
|
||||
Header: make(http.Header),
|
||||
},
|
||||
}
|
||||
return mocks.NewMockClient(responses)
|
||||
}
|
||||
return http.DefaultClient
|
||||
}
|
||||
|
||||
// Downloader is a recursive website downloader.
|
||||
type Downloader struct {
|
||||
baseURL *url.URL
|
||||
dn *datanode.DataNode
|
||||
visited map[string]bool
|
||||
maxDepth int
|
||||
baseURL *url.URL
|
||||
dn *datanode.DataNode
|
||||
visited map[string]bool
|
||||
maxDepth int
|
||||
progressBar *progressbar.ProgressBar
|
||||
client *http.Client
|
||||
}
|
||||
|
||||
// NewDownloader creates a new Downloader.
|
||||
|
|
@ -28,6 +51,7 @@ func NewDownloader(maxDepth int) *Downloader {
|
|||
dn: datanode.New(),
|
||||
visited: make(map[string]bool),
|
||||
maxDepth: maxDepth,
|
||||
client: getHTTPClient(),
|
||||
}
|
||||
}
|
||||
|
||||
|
|
@ -55,7 +79,7 @@ func (d *Downloader) crawl(pageURL string, depth int) {
|
|||
d.progressBar.Add(1)
|
||||
}
|
||||
|
||||
resp, err := http.Get(pageURL)
|
||||
resp, err := d.client.Get(pageURL)
|
||||
if err != nil {
|
||||
fmt.Printf("Error getting %s: %v\n", pageURL, err)
|
||||
return
|
||||
|
|
@ -112,7 +136,7 @@ func (d *Downloader) downloadAsset(assetURL string) {
|
|||
d.progressBar.Add(1)
|
||||
}
|
||||
|
||||
resp, err := http.Get(assetURL)
|
||||
resp, err := d.client.Get(assetURL)
|
||||
if err != nil {
|
||||
fmt.Printf("Error getting asset %s: %v\n", assetURL, err)
|
||||
return
|
||||
|
|
|
|||
Loading…
Add table
Reference in a new issue