Compare commits
19 commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
d08d2eb1fc | ||
|
|
6e278a293a | ||
|
|
c50257fa49 | ||
|
|
93c8eef876 | ||
|
|
ae3935919e | ||
|
|
af9887217a | ||
|
|
c06fd2edfc | ||
|
|
cbf650918a | ||
|
|
6eef0ff234 | ||
|
|
04d8a17dc7 | ||
|
|
0179ddf4f2 | ||
|
|
29cbec8575 | ||
|
|
b5d32ade33 | ||
|
|
24fd01dc26 | ||
|
|
ba08cac5ef | ||
|
|
f3c5fe9a7b | ||
|
|
fa20cb8aa5 | ||
|
|
a4d8aba714 | ||
|
|
b7d70883e9 |
35 changed files with 1759 additions and 119 deletions
|
|
@ -1,8 +1,8 @@
|
|||
package dev
|
||||
|
||||
import (
|
||||
"forge.lthn.ai/core/cli/pkg/cli"
|
||||
"dappco.re/go/core/i18n"
|
||||
"forge.lthn.ai/core/cli/pkg/cli"
|
||||
)
|
||||
|
||||
// addAPICommands adds the 'api' command and its subcommands to the given parent command.
|
||||
|
|
@ -17,6 +17,6 @@ func addAPICommands(parent *cli.Command) {
|
|||
// Add the 'sync' command to 'api'
|
||||
addSyncCommand(apiCmd)
|
||||
|
||||
// TODO: Add the 'test-gen' command to 'api'
|
||||
// addTestGenCommand(apiCmd)
|
||||
// Add the 'test-gen' command to 'api'
|
||||
addTestGenCommand(apiCmd)
|
||||
}
|
||||
|
|
|
|||
112
cmd/dev/cmd_api_testgen.go
Normal file
112
cmd/dev/cmd_api_testgen.go
Normal file
|
|
@ -0,0 +1,112 @@
|
|||
package dev
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"path/filepath"
|
||||
"text/template"
|
||||
|
||||
"dappco.re/go/core/i18n"
|
||||
coreio "dappco.re/go/core/io"
|
||||
"forge.lthn.ai/core/cli/pkg/cli"
|
||||
)
|
||||
|
||||
func addTestGenCommand(parent *cli.Command) {
|
||||
testGenCmd := &cli.Command{
|
||||
Use: "test-gen",
|
||||
Short: i18n.T("cmd.dev.api.test_gen.short"),
|
||||
Long: i18n.T("cmd.dev.api.test_gen.long"),
|
||||
RunE: func(cmd *cli.Command, args []string) error {
|
||||
if err := runTestGen(); err != nil {
|
||||
return cli.Wrap(err, i18n.Label("error"))
|
||||
}
|
||||
cli.Text(i18n.T("i18n.done.sync", "public API tests"))
|
||||
return nil
|
||||
},
|
||||
}
|
||||
|
||||
parent.AddCommand(testGenCmd)
|
||||
}
|
||||
|
||||
func runTestGen() error {
|
||||
pkgDir := "pkg"
|
||||
internalDirs, err := coreio.Local.List(pkgDir)
|
||||
if err != nil {
|
||||
return cli.Wrap(err, "failed to read pkg directory")
|
||||
}
|
||||
|
||||
for _, dir := range internalDirs {
|
||||
if !dir.IsDir() || dir.Name() == "core" {
|
||||
continue
|
||||
}
|
||||
|
||||
serviceName := dir.Name()
|
||||
internalDir := filepath.Join(pkgDir, serviceName)
|
||||
publicDir := serviceName
|
||||
publicTestFile := filepath.Join(publicDir, serviceName+"_test.go")
|
||||
|
||||
if !coreio.Local.Exists(internalDir) {
|
||||
continue
|
||||
}
|
||||
|
||||
symbols, err := getExportedSymbols(internalDir)
|
||||
if err != nil {
|
||||
return cli.Wrap(err, cli.Sprintf("error getting symbols for service '%s'", serviceName))
|
||||
}
|
||||
|
||||
if len(symbols) == 0 {
|
||||
continue
|
||||
}
|
||||
|
||||
if err := generatePublicAPITestFile(publicDir, publicTestFile, serviceName, symbols); err != nil {
|
||||
return cli.Wrap(err, cli.Sprintf("error generating public API test file for service '%s'", serviceName))
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
const publicAPITestTemplate = `// Code generated by "core dev api test-gen"; DO NOT EDIT.
|
||||
package {{.ServiceName}}
|
||||
|
||||
import (
|
||||
impl "forge.lthn.ai/core/cli/{{.ServiceName}}"
|
||||
)
|
||||
|
||||
{{range .Symbols}}
|
||||
{{- if eq .Kind "type"}}
|
||||
type _ = impl.{{.Name}}
|
||||
{{- else if eq .Kind "const"}}
|
||||
const _ = impl.{{.Name}}
|
||||
{{- else if eq .Kind "var"}}
|
||||
var _ = impl.{{.Name}}
|
||||
{{- else if eq .Kind "func"}}
|
||||
var _ = impl.{{.Name}}
|
||||
{{- end}}
|
||||
{{end}}
|
||||
`
|
||||
|
||||
func generatePublicAPITestFile(dir, path, serviceName string, symbols []symbolInfo) error {
|
||||
if err := coreio.Local.EnsureDir(dir); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
tmpl, err := template.New("publicAPITest").Parse(publicAPITestTemplate)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
data := struct {
|
||||
ServiceName string
|
||||
Symbols []symbolInfo
|
||||
}{
|
||||
ServiceName: serviceName,
|
||||
Symbols: symbols,
|
||||
}
|
||||
|
||||
var buf bytes.Buffer
|
||||
if err := tmpl.Execute(&buf, data); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return coreio.Local.Write(path, buf.String())
|
||||
}
|
||||
115
cmd/dev/cmd_api_testgen_test.go
Normal file
115
cmd/dev/cmd_api_testgen_test.go
Normal file
|
|
@ -0,0 +1,115 @@
|
|||
package dev
|
||||
|
||||
import (
|
||||
"os"
|
||||
"path/filepath"
|
||||
"strings"
|
||||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/require"
|
||||
|
||||
"dappco.re/go/core/io"
|
||||
)
|
||||
|
||||
func TestRunTestGen_Good(t *testing.T) {
|
||||
tmpDir := t.TempDir()
|
||||
|
||||
originalWD, err := os.Getwd()
|
||||
require.NoError(t, err)
|
||||
t.Cleanup(func() {
|
||||
_ = os.Chdir(originalWD)
|
||||
})
|
||||
require.NoError(t, os.Chdir(tmpDir))
|
||||
|
||||
serviceDir := filepath.Join(tmpDir, "pkg", "demo")
|
||||
require.NoError(t, io.Local.EnsureDir(serviceDir))
|
||||
require.NoError(t, io.Local.Write(filepath.Join(serviceDir, "demo.go"), `package demo
|
||||
|
||||
type Example struct{}
|
||||
|
||||
const Answer = 42
|
||||
|
||||
var Value = Example{}
|
||||
|
||||
func Run() {}
|
||||
`))
|
||||
require.NoError(t, io.Local.Write(filepath.Join(serviceDir, "extra.go"), `package demo
|
||||
|
||||
type Another struct{}
|
||||
|
||||
func Extra() {}
|
||||
`))
|
||||
require.NoError(t, io.Local.Write(filepath.Join(serviceDir, "demo_test.go"), `package demo
|
||||
|
||||
func Ignored() {}
|
||||
`))
|
||||
|
||||
require.NoError(t, runTestGen())
|
||||
|
||||
generatedPath := filepath.Join(tmpDir, "demo", "demo_test.go")
|
||||
content, err := io.Local.Read(generatedPath)
|
||||
require.NoError(t, err)
|
||||
|
||||
require.Contains(t, content, `// Code generated by "core dev api test-gen"; DO NOT EDIT.`)
|
||||
require.Contains(t, content, `package demo`)
|
||||
require.Contains(t, content, `impl "forge.lthn.ai/core/cli/demo"`)
|
||||
require.Contains(t, content, `type _ = impl.Example`)
|
||||
require.Contains(t, content, `type _ = impl.Another`)
|
||||
require.Contains(t, content, `const _ = impl.Answer`)
|
||||
require.Contains(t, content, `var _ = impl.Value`)
|
||||
require.Contains(t, content, `var _ = impl.Run`)
|
||||
require.Contains(t, content, `var _ = impl.Extra`)
|
||||
require.NotContains(t, content, `Ignored`)
|
||||
}
|
||||
|
||||
func TestGeneratePublicAPITestFile_Good(t *testing.T) {
|
||||
tmpDir := t.TempDir()
|
||||
|
||||
require.NoError(t, generatePublicAPITestFile(
|
||||
filepath.Join(tmpDir, "demo"),
|
||||
filepath.Join(tmpDir, "demo", "demo_test.go"),
|
||||
"demo",
|
||||
[]symbolInfo{
|
||||
{Name: "Example", Kind: "type"},
|
||||
{Name: "Answer", Kind: "const"},
|
||||
},
|
||||
))
|
||||
|
||||
content, err := io.Local.Read(filepath.Join(tmpDir, "demo", "demo_test.go"))
|
||||
require.NoError(t, err)
|
||||
|
||||
require.True(t, strings.Contains(content, `type _ = impl.Example`))
|
||||
require.True(t, strings.Contains(content, `const _ = impl.Answer`))
|
||||
}
|
||||
|
||||
func TestGetExportedSymbols_Good_MultiFile(t *testing.T) {
|
||||
tmpDir := t.TempDir()
|
||||
|
||||
serviceDir := filepath.Join(tmpDir, "demo")
|
||||
require.NoError(t, io.Local.EnsureDir(serviceDir))
|
||||
require.NoError(t, io.Local.Write(filepath.Join(serviceDir, "demo.go"), `package demo
|
||||
|
||||
type Example struct{}
|
||||
|
||||
const Answer = 42
|
||||
`))
|
||||
require.NoError(t, io.Local.Write(filepath.Join(serviceDir, "extra.go"), `package demo
|
||||
|
||||
var Value = Example{}
|
||||
|
||||
func Run() {}
|
||||
`))
|
||||
require.NoError(t, io.Local.Write(filepath.Join(serviceDir, "demo_test.go"), `package demo
|
||||
|
||||
type Ignored struct{}
|
||||
`))
|
||||
|
||||
symbols, err := getExportedSymbols(serviceDir)
|
||||
require.NoError(t, err)
|
||||
require.Equal(t, []symbolInfo{
|
||||
{Name: "Answer", Kind: "const"},
|
||||
{Name: "Example", Kind: "type"},
|
||||
{Name: "Run", Kind: "func"},
|
||||
{Name: "Value", Kind: "var"},
|
||||
}, symbols)
|
||||
}
|
||||
|
|
@ -12,14 +12,14 @@ import (
|
|||
"os"
|
||||
"os/exec"
|
||||
"path/filepath"
|
||||
"strings"
|
||||
"sort"
|
||||
|
||||
"forge.lthn.ai/core/cli/pkg/cli"
|
||||
core "dappco.re/go/core/log"
|
||||
"dappco.re/go/core/scm/git"
|
||||
"dappco.re/go/core/i18n"
|
||||
"dappco.re/go/core/io"
|
||||
core "dappco.re/go/core/log"
|
||||
"dappco.re/go/core/scm/git"
|
||||
"dappco.re/go/core/scm/repos"
|
||||
"forge.lthn.ai/core/cli/pkg/cli"
|
||||
)
|
||||
|
||||
// Apply command flags
|
||||
|
|
@ -235,29 +235,39 @@ func getApplyTargetRepos() ([]*repos.Repo, error) {
|
|||
return nil, core.E("dev.apply", "failed to load registry", err)
|
||||
}
|
||||
|
||||
// If --repos specified, filter to those
|
||||
if applyRepos != "" {
|
||||
repoNames := strings.Split(applyRepos, ",")
|
||||
nameSet := make(map[string]bool)
|
||||
for _, name := range repoNames {
|
||||
nameSet[strings.TrimSpace(name)] = true
|
||||
}
|
||||
return filterTargetRepos(registry, applyRepos), nil
|
||||
}
|
||||
|
||||
var matched []*repos.Repo
|
||||
for _, repo := range registry.Repos {
|
||||
if nameSet[repo.Name] {
|
||||
// filterTargetRepos selects repos by exact name/path or glob pattern.
|
||||
func filterTargetRepos(registry *repos.Registry, selection string) []*repos.Repo {
|
||||
repoNames := make([]string, 0, len(registry.Repos))
|
||||
for name := range registry.Repos {
|
||||
repoNames = append(repoNames, name)
|
||||
}
|
||||
sort.Strings(repoNames)
|
||||
|
||||
if selection == "" {
|
||||
matched := make([]*repos.Repo, 0, len(repoNames))
|
||||
for _, name := range repoNames {
|
||||
matched = append(matched, registry.Repos[name])
|
||||
}
|
||||
return matched
|
||||
}
|
||||
|
||||
patterns := splitPatterns(selection)
|
||||
var matched []*repos.Repo
|
||||
|
||||
for _, name := range repoNames {
|
||||
repo := registry.Repos[name]
|
||||
for _, candidate := range patterns {
|
||||
if matchGlob(repo.Name, candidate) || matchGlob(repo.Path, candidate) {
|
||||
matched = append(matched, repo)
|
||||
break
|
||||
}
|
||||
}
|
||||
return matched, nil
|
||||
}
|
||||
|
||||
// Return all repos as slice
|
||||
var all []*repos.Repo
|
||||
for _, repo := range registry.Repos {
|
||||
all = append(all, repo)
|
||||
}
|
||||
return all, nil
|
||||
return matched
|
||||
}
|
||||
|
||||
// runCommandInRepo runs a shell command in a repo directory
|
||||
|
|
|
|||
39
cmd/dev/cmd_apply_test.go
Normal file
39
cmd/dev/cmd_apply_test.go
Normal file
|
|
@ -0,0 +1,39 @@
|
|||
package dev
|
||||
|
||||
import (
|
||||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/require"
|
||||
|
||||
"dappco.re/go/core/scm/repos"
|
||||
)
|
||||
|
||||
func TestFilterTargetRepos_Good(t *testing.T) {
|
||||
registry := &repos.Registry{
|
||||
Repos: map[string]*repos.Repo{
|
||||
"core-api": &repos.Repo{Name: "core-api", Path: "packages/core-api"},
|
||||
"core-web": &repos.Repo{Name: "core-web", Path: "packages/core-web"},
|
||||
"docs-site": &repos.Repo{Name: "docs-site", Path: "sites/docs"},
|
||||
},
|
||||
}
|
||||
|
||||
t.Run("exact names", func(t *testing.T) {
|
||||
matched := filterTargetRepos(registry, "core-api,docs-site")
|
||||
require.Len(t, matched, 2)
|
||||
require.Equal(t, "core-api", matched[0].Name)
|
||||
require.Equal(t, "docs-site", matched[1].Name)
|
||||
})
|
||||
|
||||
t.Run("glob patterns", func(t *testing.T) {
|
||||
matched := filterTargetRepos(registry, "core-*,sites/*")
|
||||
require.Len(t, matched, 3)
|
||||
require.Equal(t, "core-api", matched[0].Name)
|
||||
require.Equal(t, "core-web", matched[1].Name)
|
||||
require.Equal(t, "docs-site", matched[2].Name)
|
||||
})
|
||||
|
||||
t.Run("all repos when empty", func(t *testing.T) {
|
||||
matched := filterTargetRepos(registry, "")
|
||||
require.Len(t, matched, 3)
|
||||
})
|
||||
}
|
||||
|
|
@ -19,6 +19,7 @@
|
|||
//
|
||||
// API Tools:
|
||||
// - api sync: Synchronize public service APIs
|
||||
// - api test-gen: Generate compile-time API test stubs
|
||||
//
|
||||
// Dev Environment (VM management):
|
||||
// - install: Download dev environment image
|
||||
|
|
@ -33,8 +34,8 @@
|
|||
package dev
|
||||
|
||||
import (
|
||||
"forge.lthn.ai/core/cli/pkg/cli"
|
||||
"dappco.re/go/core/i18n"
|
||||
"forge.lthn.ai/core/cli/pkg/cli"
|
||||
|
||||
_ "dappco.re/go/core/devops/locales"
|
||||
)
|
||||
|
|
|
|||
|
|
@ -14,12 +14,12 @@ import (
|
|||
"path/filepath"
|
||||
"strings"
|
||||
|
||||
"forge.lthn.ai/core/cli/pkg/cli"
|
||||
"dappco.re/go/core/scm/git"
|
||||
"dappco.re/go/core/i18n"
|
||||
coreio "dappco.re/go/core/io"
|
||||
"dappco.re/go/core/log"
|
||||
"dappco.re/go/core/scm/git"
|
||||
"dappco.re/go/core/scm/repos"
|
||||
"forge.lthn.ai/core/cli/pkg/cli"
|
||||
)
|
||||
|
||||
// File sync command flags
|
||||
|
|
@ -29,6 +29,7 @@ var (
|
|||
fileSyncCoAuthor string
|
||||
fileSyncDryRun bool
|
||||
fileSyncPush bool
|
||||
fileSyncYes bool
|
||||
)
|
||||
|
||||
// AddFileSyncCommand adds the 'sync' command to dev for file syncing.
|
||||
|
|
@ -48,6 +49,7 @@ func AddFileSyncCommand(parent *cli.Command) {
|
|||
syncCmd.Flags().StringVar(&fileSyncCoAuthor, "co-author", "", i18n.T("cmd.dev.file_sync.flag.co_author"))
|
||||
syncCmd.Flags().BoolVar(&fileSyncDryRun, "dry-run", false, i18n.T("cmd.dev.file_sync.flag.dry_run"))
|
||||
syncCmd.Flags().BoolVar(&fileSyncPush, "push", false, i18n.T("cmd.dev.file_sync.flag.push"))
|
||||
syncCmd.Flags().BoolVarP(&fileSyncYes, "yes", "y", false, i18n.T("cmd.dev.file_sync.flag.yes"))
|
||||
|
||||
_ = syncCmd.MarkFlagRequired("to")
|
||||
|
||||
|
|
@ -64,23 +66,6 @@ func runFileSync(source string) error {
|
|||
|
||||
// Validate source exists
|
||||
sourceInfo, err := os.Stat(source) // Keep os.Stat for local source check or use coreio? coreio.Local.IsFile is bool.
|
||||
// If source is local file on disk (not in medium), we can use os.Stat.
|
||||
// But concept is everything is via Medium?
|
||||
// User is running CLI on host. `source` is relative to CWD.
|
||||
// coreio.Local uses absolute path or relative to root (which is "/" by default).
|
||||
// So coreio.Local works.
|
||||
if !coreio.Local.IsFile(source) {
|
||||
// Might be directory
|
||||
// IsFile returns false for directory.
|
||||
}
|
||||
// Let's rely on os.Stat for initial source check to distinguish dir vs file easily if coreio doesn't expose Stat.
|
||||
// coreio doesn't expose Stat.
|
||||
|
||||
// Check using standard os for source determination as we are outside strict sandbox for input args potentially?
|
||||
// But we should use coreio where possible.
|
||||
// coreio.Local.List worked for dirs.
|
||||
// Let's stick to os.Stat for source properties finding as typically allowed for CLI args.
|
||||
|
||||
if err != nil {
|
||||
return log.E("dev.sync", i18n.T("cmd.dev.file_sync.error.source_not_found", map[string]any{"Path": source}), err)
|
||||
}
|
||||
|
|
@ -103,6 +88,16 @@ func runFileSync(source string) error {
|
|||
}
|
||||
cli.Blank()
|
||||
|
||||
if !fileSyncDryRun && !fileSyncYes {
|
||||
cli.Print("%s\n", warningStyle.Render(i18n.T("cmd.dev.file_sync.warning")))
|
||||
cli.Blank()
|
||||
if !cli.Confirm(i18n.T("cmd.dev.file_sync.confirm")) {
|
||||
cli.Text(i18n.T("cli.aborted"))
|
||||
return nil
|
||||
}
|
||||
cli.Blank()
|
||||
}
|
||||
|
||||
var succeeded, skipped, failed int
|
||||
|
||||
for _, repo := range targetRepos {
|
||||
|
|
@ -219,22 +214,48 @@ func resolveTargetRepos(pattern string) ([]*repos.Repo, error) {
|
|||
|
||||
// Match pattern against repo names
|
||||
var matched []*repos.Repo
|
||||
patterns := splitPatterns(pattern)
|
||||
for _, repo := range registry.Repos {
|
||||
if matchGlob(repo.Name, pattern) || matchGlob(repo.Path, pattern) {
|
||||
matched = append(matched, repo)
|
||||
for _, candidate := range patterns {
|
||||
if matchGlob(repo.Name, candidate) || matchGlob(repo.Path, candidate) {
|
||||
matched = append(matched, repo)
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return matched, nil
|
||||
}
|
||||
|
||||
// splitPatterns normalises comma-separated glob patterns.
|
||||
func splitPatterns(pattern string) []string {
|
||||
raw := strings.Split(pattern, ",")
|
||||
out := make([]string, 0, len(raw))
|
||||
|
||||
for _, p := range raw {
|
||||
p = strings.TrimSpace(p)
|
||||
if p == "" {
|
||||
continue
|
||||
}
|
||||
out = append(out, p)
|
||||
}
|
||||
|
||||
return out
|
||||
}
|
||||
|
||||
// matchGlob performs simple glob matching with * wildcards
|
||||
func matchGlob(s, pattern string) bool {
|
||||
// Handle exact match
|
||||
// Handle exact match and simple glob patterns.
|
||||
if s == pattern {
|
||||
return true
|
||||
}
|
||||
|
||||
matched, err := filepath.Match(pattern, s)
|
||||
if err == nil {
|
||||
return matched
|
||||
}
|
||||
|
||||
// Fallback to legacy wildcard rules for invalid glob patterns.
|
||||
// Handle * at end
|
||||
if strings.HasSuffix(pattern, "*") {
|
||||
prefix := strings.TrimSuffix(pattern, "*")
|
||||
|
|
|
|||
40
cmd/dev/cmd_file_sync_test.go
Normal file
40
cmd/dev/cmd_file_sync_test.go
Normal file
|
|
@ -0,0 +1,40 @@
|
|||
package dev
|
||||
|
||||
import (
|
||||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/require"
|
||||
|
||||
"forge.lthn.ai/core/cli/pkg/cli"
|
||||
)
|
||||
|
||||
func TestAddFileSyncCommand_Good(t *testing.T) {
|
||||
root := &cli.Command{Use: "core"}
|
||||
|
||||
AddDevCommands(root)
|
||||
|
||||
syncCmd, _, err := root.Find([]string{"dev", "sync"})
|
||||
require.NoError(t, err)
|
||||
require.NotNil(t, syncCmd)
|
||||
|
||||
yesFlag := syncCmd.Flags().Lookup("yes")
|
||||
require.NotNil(t, yesFlag)
|
||||
require.Equal(t, "y", yesFlag.Shorthand)
|
||||
|
||||
require.NotNil(t, syncCmd.Flags().Lookup("dry-run"))
|
||||
require.NotNil(t, syncCmd.Flags().Lookup("push"))
|
||||
}
|
||||
|
||||
func TestSplitPatterns_Good(t *testing.T) {
|
||||
patterns := splitPatterns("packages/core-*, apps/* ,services/*,")
|
||||
require.Equal(t, []string{"packages/core-*", "apps/*", "services/*"}, patterns)
|
||||
}
|
||||
|
||||
func TestMatchGlob_Good(t *testing.T) {
|
||||
require.True(t, matchGlob("packages/core-xyz", "packages/core-*"))
|
||||
require.True(t, matchGlob("packages/core-xyz", "*/core-*"))
|
||||
require.True(t, matchGlob("a-b", "a?b"))
|
||||
require.True(t, matchGlob("foo", "foo"))
|
||||
require.False(t, matchGlob("core-other", "packages/*"))
|
||||
require.False(t, matchGlob("abc", "[]"))
|
||||
}
|
||||
|
|
@ -6,12 +6,15 @@ import (
|
|||
"go/parser"
|
||||
"go/token"
|
||||
"path/filepath"
|
||||
"sort"
|
||||
"strings"
|
||||
"text/template"
|
||||
|
||||
"forge.lthn.ai/core/cli/pkg/cli" // Added
|
||||
"dappco.re/go/core/i18n" // Added
|
||||
"dappco.re/go/core/i18n"
|
||||
coreio "dappco.re/go/core/io"
|
||||
// Added
|
||||
|
||||
"forge.lthn.ai/core/cli/pkg/cli"
|
||||
|
||||
"golang.org/x/text/cases"
|
||||
"golang.org/x/text/language"
|
||||
)
|
||||
|
|
@ -52,15 +55,15 @@ func runSync() error {
|
|||
}
|
||||
|
||||
serviceName := dir.Name()
|
||||
internalFile := filepath.Join(pkgDir, serviceName, serviceName+".go")
|
||||
internalDir := filepath.Join(pkgDir, serviceName)
|
||||
publicDir := serviceName
|
||||
publicFile := filepath.Join(publicDir, serviceName+".go")
|
||||
|
||||
if !coreio.Local.IsFile(internalFile) {
|
||||
if !coreio.Local.Exists(internalDir) {
|
||||
continue
|
||||
}
|
||||
|
||||
symbols, err := getExportedSymbols(internalFile)
|
||||
symbols, err := getExportedSymbols(internalDir)
|
||||
if err != nil {
|
||||
return cli.Wrap(err, cli.Sprintf("error getting symbols for service '%s'", serviceName))
|
||||
}
|
||||
|
|
@ -74,23 +77,29 @@ func runSync() error {
|
|||
}
|
||||
|
||||
func getExportedSymbols(path string) ([]symbolInfo, error) {
|
||||
// ParseFile expects a filename/path and reads it using os.Open by default if content is nil.
|
||||
// Since we want to use our Medium abstraction, we should read the file content first.
|
||||
content, err := coreio.Local.Read(path)
|
||||
files, err := listGoFiles(path)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
fset := token.NewFileSet()
|
||||
// ParseFile can take content as string (src argument).
|
||||
node, err := parser.ParseFile(fset, path, content, parser.ParseComments)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
symbolsByName := make(map[string]symbolInfo)
|
||||
for _, file := range files {
|
||||
content, err := coreio.Local.Read(file)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
fset := token.NewFileSet()
|
||||
node, err := parser.ParseFile(fset, file, content, parser.ParseComments)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
for name, obj := range node.Scope.Objects {
|
||||
if !ast.IsExported(name) {
|
||||
continue
|
||||
}
|
||||
|
||||
var symbols []symbolInfo
|
||||
for name, obj := range node.Scope.Objects {
|
||||
if ast.IsExported(name) {
|
||||
kind := "unknown"
|
||||
switch obj.Kind {
|
||||
case ast.Con:
|
||||
|
|
@ -102,14 +111,59 @@ func getExportedSymbols(path string) ([]symbolInfo, error) {
|
|||
case ast.Typ:
|
||||
kind = "type"
|
||||
}
|
||||
if kind != "unknown" {
|
||||
symbols = append(symbols, symbolInfo{Name: name, Kind: kind})
|
||||
|
||||
if kind == "unknown" {
|
||||
continue
|
||||
}
|
||||
|
||||
if _, exists := symbolsByName[name]; !exists {
|
||||
symbolsByName[name] = symbolInfo{Name: name, Kind: kind}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
symbols := make([]symbolInfo, 0, len(symbolsByName))
|
||||
for _, symbol := range symbolsByName {
|
||||
symbols = append(symbols, symbol)
|
||||
}
|
||||
|
||||
sort.Slice(symbols, func(i, j int) bool {
|
||||
if symbols[i].Name == symbols[j].Name {
|
||||
return symbols[i].Kind < symbols[j].Kind
|
||||
}
|
||||
return symbols[i].Name < symbols[j].Name
|
||||
})
|
||||
|
||||
return symbols, nil
|
||||
}
|
||||
|
||||
func listGoFiles(path string) ([]string, error) {
|
||||
entries, err := coreio.Local.List(path)
|
||||
if err == nil {
|
||||
files := make([]string, 0, len(entries))
|
||||
for _, entry := range entries {
|
||||
if entry.IsDir() {
|
||||
continue
|
||||
}
|
||||
|
||||
name := entry.Name()
|
||||
if !strings.HasSuffix(name, ".go") || strings.HasSuffix(name, "_test.go") {
|
||||
continue
|
||||
}
|
||||
|
||||
files = append(files, filepath.Join(path, name))
|
||||
}
|
||||
sort.Strings(files)
|
||||
return files, nil
|
||||
}
|
||||
|
||||
if coreio.Local.IsFile(path) {
|
||||
return []string{path}, nil
|
||||
}
|
||||
|
||||
return nil, err
|
||||
}
|
||||
|
||||
const publicAPITemplate = `// package {{.ServiceName}} provides the public API for the {{.ServiceName}} service.
|
||||
package {{.ServiceName}}
|
||||
|
||||
|
|
|
|||
|
|
@ -5,11 +5,11 @@ import (
|
|||
"os"
|
||||
"time"
|
||||
|
||||
"forge.lthn.ai/core/cli/pkg/cli"
|
||||
"forge.lthn.ai/core/go-container/devenv"
|
||||
"dappco.re/go/core/i18n"
|
||||
"dappco.re/go/core/io"
|
||||
log "dappco.re/go/core/log"
|
||||
"forge.lthn.ai/core/cli/pkg/cli"
|
||||
"forge.lthn.ai/core/go-container/devenv"
|
||||
)
|
||||
|
||||
// addVMCommands adds the dev environment VM commands to the dev parent command.
|
||||
|
|
@ -190,10 +190,13 @@ func runVMStop() error {
|
|||
return nil
|
||||
}
|
||||
|
||||
// addVMStatusCommand adds the 'devops status' command.
|
||||
// addVMStatusCommand adds the 'dev status' command.
|
||||
func addVMStatusCommand(parent *cli.Command) {
|
||||
statusCmd := &cli.Command{
|
||||
Use: "vm-status",
|
||||
Use: "status",
|
||||
Aliases: []string{
|
||||
"vm-status",
|
||||
},
|
||||
Short: i18n.T("cmd.dev.vm.status.short"),
|
||||
Long: i18n.T("cmd.dev.vm.status.long"),
|
||||
RunE: func(cmd *cli.Command, args []string) error {
|
||||
|
|
|
|||
26
cmd/dev/cmd_vm_test.go
Normal file
26
cmd/dev/cmd_vm_test.go
Normal file
|
|
@ -0,0 +1,26 @@
|
|||
package dev
|
||||
|
||||
import (
|
||||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/require"
|
||||
|
||||
"forge.lthn.ai/core/cli/pkg/cli"
|
||||
)
|
||||
|
||||
func TestAddVMStatusCommand_Good(t *testing.T) {
|
||||
root := &cli.Command{Use: "core"}
|
||||
|
||||
AddDevCommands(root)
|
||||
|
||||
statusCmd, _, err := root.Find([]string{"dev", "status"})
|
||||
require.NoError(t, err)
|
||||
require.NotNil(t, statusCmd)
|
||||
require.Equal(t, "status", statusCmd.Use)
|
||||
require.Contains(t, statusCmd.Aliases, "vm-status")
|
||||
|
||||
aliasCmd, _, err := root.Find([]string{"dev", "vm-status"})
|
||||
require.NoError(t, err)
|
||||
require.NotNil(t, aliasCmd)
|
||||
require.Equal(t, statusCmd, aliasCmd)
|
||||
}
|
||||
|
|
@ -7,10 +7,10 @@ import (
|
|||
"slices"
|
||||
"strings"
|
||||
|
||||
"forge.lthn.ai/core/cli/pkg/cli"
|
||||
"dappco.re/go/core/i18n"
|
||||
"dappco.re/go/core/io"
|
||||
"dappco.re/go/core/scm/repos"
|
||||
"forge.lthn.ai/core/cli/pkg/cli"
|
||||
)
|
||||
|
||||
// Workflow command flags
|
||||
|
|
@ -117,6 +117,10 @@ func runWorkflowList(registryPath string) error {
|
|||
for _, wf := range templateWorkflows {
|
||||
templateSet[wf] = true
|
||||
}
|
||||
templateNames := slices.Sorted(maps.Keys(templateSet))
|
||||
if len(templateNames) > 0 {
|
||||
cli.Print("%s %s\n\n", i18n.T("cmd.dev.workflow.templates"), strings.Join(templateNames, ", "))
|
||||
}
|
||||
|
||||
// Build table
|
||||
headers := []string{i18n.T("cmd.dev.workflow.header.repo")}
|
||||
|
|
|
|||
|
|
@ -1,7 +1,9 @@
|
|||
package dev
|
||||
|
||||
import (
|
||||
"maps"
|
||||
"path/filepath"
|
||||
"slices"
|
||||
"testing"
|
||||
|
||||
"dappco.re/go/core/io"
|
||||
|
|
@ -106,3 +108,21 @@ func TestFindTemplateWorkflow_NotFound(t *testing.T) {
|
|||
t.Errorf("Expected empty string for non-existent template, got %s", result)
|
||||
}
|
||||
}
|
||||
|
||||
func TestTemplateNames_Good(t *testing.T) {
|
||||
templateSet := map[string]bool{
|
||||
"z.yml": true,
|
||||
"a.yml": true,
|
||||
"m.yml": true,
|
||||
}
|
||||
|
||||
names := slices.Sorted(maps.Keys(templateSet))
|
||||
|
||||
if len(names) != 3 {
|
||||
t.Fatalf("Expected 3 template names, got %d", len(names))
|
||||
}
|
||||
|
||||
if names[0] != "a.yml" || names[1] != "m.yml" || names[2] != "z.yml" {
|
||||
t.Fatalf("Expected sorted template names, got %v", names)
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -6,10 +6,10 @@ import (
|
|||
"strings"
|
||||
|
||||
"dappco.re/go/agent/cmd/workspace"
|
||||
"forge.lthn.ai/core/cli/pkg/cli"
|
||||
"dappco.re/go/core/i18n"
|
||||
"dappco.re/go/core/io"
|
||||
"dappco.re/go/core/scm/repos"
|
||||
"forge.lthn.ai/core/cli/pkg/cli"
|
||||
)
|
||||
|
||||
// loadRegistryWithConfig loads the registry and applies workspace configuration.
|
||||
|
|
|
|||
|
|
@ -5,8 +5,8 @@ import (
|
|||
"os"
|
||||
"os/exec"
|
||||
|
||||
"dappco.re/go/core"
|
||||
agentic "dappco.re/go/agent/pkg/lifecycle"
|
||||
"dappco.re/go/core"
|
||||
)
|
||||
|
||||
// ServiceOptions for configuring the dev service.
|
||||
|
|
|
|||
|
|
@ -7,10 +7,10 @@ import (
|
|||
"strings"
|
||||
|
||||
"dappco.re/go/agent/cmd/workspace"
|
||||
"forge.lthn.ai/core/cli/pkg/cli"
|
||||
"dappco.re/go/core/i18n"
|
||||
"dappco.re/go/core/io"
|
||||
"dappco.re/go/core/scm/repos"
|
||||
"forge.lthn.ai/core/cli/pkg/cli"
|
||||
)
|
||||
|
||||
// RepoDocInfo holds documentation info for a repo
|
||||
|
|
|
|||
|
|
@ -6,10 +6,10 @@ import (
|
|||
"path/filepath"
|
||||
"strings"
|
||||
|
||||
"forge.lthn.ai/core/cli/pkg/cli"
|
||||
"dappco.re/go/core/i18n"
|
||||
"dappco.re/go/core/io"
|
||||
"dappco.re/go/core/scm/repos"
|
||||
"forge.lthn.ai/core/cli/pkg/cli"
|
||||
)
|
||||
|
||||
// Flag variables for sync command
|
||||
|
|
@ -140,9 +140,7 @@ func runPHPSync(reg *repos.Registry, basePath string, outputDir string, dryRun b
|
|||
repoOutDir := filepath.Join(outputDir, outName)
|
||||
|
||||
// Clear existing directory (recursively)
|
||||
_ = io.Local.DeleteAll(repoOutDir)
|
||||
|
||||
if err := io.Local.EnsureDir(repoOutDir); err != nil {
|
||||
if err := resetOutputDir(repoOutDir); err != nil {
|
||||
cli.Print(" %s %s: %s\n", errorStyle.Render("✗"), info.Name, err)
|
||||
continue
|
||||
}
|
||||
|
|
@ -275,6 +273,7 @@ func runZensicalSync(reg *repos.Registry, basePath string, outputDir string, dry
|
|||
|
||||
cli.Blank()
|
||||
var synced int
|
||||
repoLoop:
|
||||
for _, info := range docsInfo {
|
||||
section, folder := zensicalOutputName(info.Name)
|
||||
|
||||
|
|
@ -283,6 +282,11 @@ func runZensicalSync(reg *repos.Registry, basePath string, outputDir string, dry
|
|||
destDir = filepath.Join(destDir, folder)
|
||||
}
|
||||
|
||||
if err := resetOutputDir(destDir); err != nil {
|
||||
cli.Print(" %s %s: %s\n", errorStyle.Render("✗"), info.Name, err)
|
||||
continue
|
||||
}
|
||||
|
||||
weight := 10
|
||||
docsDir := filepath.Join(info.Path, "docs")
|
||||
for _, f := range info.DocsFiles {
|
||||
|
|
@ -295,9 +299,8 @@ func runZensicalSync(reg *repos.Registry, basePath string, outputDir string, dry
|
|||
weight += 10
|
||||
}
|
||||
|
||||
if info.Readme != "" && folder != "" {
|
||||
dst := filepath.Join(destDir, "index.md")
|
||||
if err := copyWithFrontMatter(info.Readme, dst, 1); err != nil {
|
||||
if info.Readme != "" {
|
||||
if err := copyZensicalReadme(info.Readme, destDir); err != nil {
|
||||
cli.Print(" %s README: %s\n", errorStyle.Render("✗"), err)
|
||||
}
|
||||
}
|
||||
|
|
@ -305,6 +308,10 @@ func runZensicalSync(reg *repos.Registry, basePath string, outputDir string, dry
|
|||
if len(info.KBFiles) > 0 {
|
||||
suffix := strings.TrimPrefix(info.Name, "go-")
|
||||
kbDestDir := filepath.Join(outputDir, "kb", suffix)
|
||||
if err := resetOutputDir(kbDestDir); err != nil {
|
||||
cli.Print(" %s KB: %s\n", errorStyle.Render("✗"), err)
|
||||
continue repoLoop
|
||||
}
|
||||
kbDir := filepath.Join(info.Path, "KB")
|
||||
kbWeight := 10
|
||||
for _, f := range info.KBFiles {
|
||||
|
|
@ -326,10 +333,24 @@ func runZensicalSync(reg *repos.Registry, basePath string, outputDir string, dry
|
|||
return nil
|
||||
}
|
||||
|
||||
// copyZensicalReadme copies a repository README to index.md in the target directory.
|
||||
func copyZensicalReadme(src, destDir string) error {
|
||||
dst := filepath.Join(destDir, "index.md")
|
||||
return copyWithFrontMatter(src, dst, 1)
|
||||
}
|
||||
|
||||
// resetOutputDir clears and recreates a target directory before copying files into it.
|
||||
func resetOutputDir(dir string) error {
|
||||
if err := io.Local.DeleteAll(dir); err != nil {
|
||||
return err
|
||||
}
|
||||
return io.Local.EnsureDir(dir)
|
||||
}
|
||||
|
||||
// goHelpOutputName maps repo name to output folder name for go-help.
|
||||
func goHelpOutputName(repoName string) string {
|
||||
if repoName == "core" {
|
||||
return "cli"
|
||||
return "go"
|
||||
}
|
||||
if strings.HasPrefix(repoName, "core-") {
|
||||
return strings.TrimPrefix(repoName, "core-")
|
||||
|
|
@ -388,9 +409,7 @@ func runGoHelpSync(reg *repos.Registry, basePath string, outputDir string, dryRu
|
|||
repoOutDir := filepath.Join(outputDir, outName)
|
||||
|
||||
// Clear existing directory
|
||||
_ = io.Local.DeleteAll(repoOutDir)
|
||||
|
||||
if err := io.Local.EnsureDir(repoOutDir); err != nil {
|
||||
if err := resetOutputDir(repoOutDir); err != nil {
|
||||
cli.Print(" %s %s: %s\n", errorStyle.Render("✗"), info.Name, err)
|
||||
continue
|
||||
}
|
||||
|
|
|
|||
80
cmd/docs/cmd_sync_test.go
Normal file
80
cmd/docs/cmd_sync_test.go
Normal file
|
|
@ -0,0 +1,80 @@
|
|||
package docs
|
||||
|
||||
import (
|
||||
"os"
|
||||
"path/filepath"
|
||||
"strings"
|
||||
"testing"
|
||||
)
|
||||
|
||||
func TestCopyZensicalReadme_Good(t *testing.T) {
|
||||
srcDir := t.TempDir()
|
||||
destDir := t.TempDir()
|
||||
|
||||
src := filepath.Join(srcDir, "README.md")
|
||||
if err := os.WriteFile(src, []byte("# Hello\n\nBody text.\n"), 0o644); err != nil {
|
||||
t.Fatalf("write source README: %v", err)
|
||||
}
|
||||
|
||||
if err := copyZensicalReadme(src, destDir); err != nil {
|
||||
t.Fatalf("copy README: %v", err)
|
||||
}
|
||||
|
||||
output := filepath.Join(destDir, "index.md")
|
||||
data, err := os.ReadFile(output)
|
||||
if err != nil {
|
||||
t.Fatalf("read output index.md: %v", err)
|
||||
}
|
||||
|
||||
content := string(data)
|
||||
if !strings.HasPrefix(content, "---\n") {
|
||||
t.Fatalf("expected Hugo front matter at start, got: %q", content)
|
||||
}
|
||||
if !strings.Contains(content, "title: \"README\"") {
|
||||
t.Fatalf("expected README title in front matter, got: %q", content)
|
||||
}
|
||||
if !strings.Contains(content, "Body text.") {
|
||||
t.Fatalf("expected README body to be preserved, got: %q", content)
|
||||
}
|
||||
}
|
||||
|
||||
func TestResetOutputDir_ClearsExistingFiles(t *testing.T) {
|
||||
dir := t.TempDir()
|
||||
|
||||
stale := filepath.Join(dir, "stale.md")
|
||||
if err := os.WriteFile(stale, []byte("old content"), 0o644); err != nil {
|
||||
t.Fatalf("write stale file: %v", err)
|
||||
}
|
||||
|
||||
if err := resetOutputDir(dir); err != nil {
|
||||
t.Fatalf("reset output dir: %v", err)
|
||||
}
|
||||
|
||||
if _, err := os.Stat(stale); !os.IsNotExist(err) {
|
||||
t.Fatalf("expected stale file to be removed, got err=%v", err)
|
||||
}
|
||||
|
||||
info, err := os.Stat(dir)
|
||||
if err != nil {
|
||||
t.Fatalf("stat output dir: %v", err)
|
||||
}
|
||||
if !info.IsDir() {
|
||||
t.Fatalf("expected output dir to exist as a directory")
|
||||
}
|
||||
}
|
||||
|
||||
func TestGoHelpOutputName_Good(t *testing.T) {
|
||||
cases := map[string]string{
|
||||
"core": "go",
|
||||
"core-admin": "admin",
|
||||
"core-api": "api",
|
||||
"go-example": "go-example",
|
||||
"custom-repo": "custom-repo",
|
||||
}
|
||||
|
||||
for input, want := range cases {
|
||||
if got := goHelpOutputName(input); got != want {
|
||||
t.Fatalf("goHelpOutputName(%q) = %q, want %q", input, got, want)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@ -6,8 +6,8 @@ import (
|
|||
"path/filepath"
|
||||
"runtime"
|
||||
|
||||
"forge.lthn.ai/core/cli/pkg/cli"
|
||||
coreio "dappco.re/go/core/io"
|
||||
"forge.lthn.ai/core/cli/pkg/cli"
|
||||
"gopkg.in/yaml.v3"
|
||||
)
|
||||
|
||||
|
|
@ -32,7 +32,7 @@ func DefaultCIConfig() *CIConfig {
|
|||
return &CIConfig{
|
||||
Tap: "host-uk/tap",
|
||||
Formula: "core",
|
||||
ScoopBucket: "https://https://forge.lthn.ai/core/scoop-bucket.git",
|
||||
ScoopBucket: "https://forge.lthn.ai/core/scoop-bucket.git",
|
||||
ChocolateyPkg: "core-cli",
|
||||
Repository: "host-uk/core",
|
||||
DefaultVersion: "dev",
|
||||
|
|
|
|||
64
cmd/setup/cmd_ci_test.go
Normal file
64
cmd/setup/cmd_ci_test.go
Normal file
|
|
@ -0,0 +1,64 @@
|
|||
package setup
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"io"
|
||||
"os"
|
||||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/require"
|
||||
)
|
||||
|
||||
func captureStdout(t *testing.T, fn func() error) (string, error) {
|
||||
t.Helper()
|
||||
|
||||
oldStdout := os.Stdout
|
||||
r, w, err := os.Pipe()
|
||||
require.NoError(t, err)
|
||||
defer func() {
|
||||
_ = r.Close()
|
||||
}()
|
||||
|
||||
os.Stdout = w
|
||||
defer func() {
|
||||
os.Stdout = oldStdout
|
||||
}()
|
||||
|
||||
outC := make(chan string, 1)
|
||||
errC := make(chan error, 1)
|
||||
|
||||
go func() {
|
||||
var buf bytes.Buffer
|
||||
_, copyErr := io.Copy(&buf, r)
|
||||
errC <- copyErr
|
||||
outC <- buf.String()
|
||||
}()
|
||||
|
||||
runErr := fn()
|
||||
|
||||
require.NoError(t, w.Close())
|
||||
require.NoError(t, <-errC)
|
||||
out := <-outC
|
||||
|
||||
return out, runErr
|
||||
}
|
||||
|
||||
func TestDefaultCIConfig_Good(t *testing.T) {
|
||||
cfg := DefaultCIConfig()
|
||||
|
||||
require.Equal(t, "host-uk/tap", cfg.Tap)
|
||||
require.Equal(t, "core", cfg.Formula)
|
||||
require.Equal(t, "https://forge.lthn.ai/core/scoop-bucket.git", cfg.ScoopBucket)
|
||||
require.Equal(t, "core-cli", cfg.ChocolateyPkg)
|
||||
require.Equal(t, "host-uk/core", cfg.Repository)
|
||||
require.Equal(t, "dev", cfg.DefaultVersion)
|
||||
}
|
||||
|
||||
func TestOutputPowershellInstall_Good(t *testing.T) {
|
||||
out, err := captureStdout(t, func() error {
|
||||
return outputPowershellInstall(DefaultCIConfig(), "dev")
|
||||
})
|
||||
require.NoError(t, err)
|
||||
require.Contains(t, out, `scoop bucket add host-uk $ScoopBucket`)
|
||||
require.NotContains(t, out, `https://https://forge.lthn.ai/core/scoop-bucket.git`)
|
||||
}
|
||||
|
|
@ -14,11 +14,11 @@ import (
|
|||
"strings"
|
||||
|
||||
"dappco.re/go/agent/cmd/workspace"
|
||||
"forge.lthn.ai/core/cli/pkg/cli"
|
||||
"dappco.re/go/core/i18n"
|
||||
coreio "dappco.re/go/core/io"
|
||||
log "dappco.re/go/core/log"
|
||||
"dappco.re/go/core/scm/repos"
|
||||
"forge.lthn.ai/core/cli/pkg/cli"
|
||||
)
|
||||
|
||||
// runRegistrySetup loads a registry from path and runs setup.
|
||||
|
|
|
|||
|
|
@ -8,6 +8,8 @@ package setup
|
|||
|
||||
import (
|
||||
"fmt"
|
||||
"net/url"
|
||||
"os"
|
||||
"os/exec"
|
||||
"path/filepath"
|
||||
"strings"
|
||||
|
|
@ -15,8 +17,33 @@ import (
|
|||
"dappco.re/go/core/i18n"
|
||||
coreio "dappco.re/go/core/io"
|
||||
log "dappco.re/go/core/log"
|
||||
"forge.lthn.ai/core/cli/pkg/cli"
|
||||
)
|
||||
|
||||
var repoDryRun bool
|
||||
|
||||
// addRepoCommand adds the 'repo' subcommand to generate .core configuration.
|
||||
func addRepoCommand(parent *cli.Command) {
|
||||
repoCmd := &cli.Command{
|
||||
Use: "repo",
|
||||
Short: i18n.T("cmd.setup.repo.short"),
|
||||
Long: i18n.T("cmd.setup.repo.long"),
|
||||
Args: cli.ExactArgs(0),
|
||||
RunE: func(cmd *cli.Command, args []string) error {
|
||||
cwd, err := os.Getwd()
|
||||
if err != nil {
|
||||
return log.E("setup.repo", "failed to get working directory", err)
|
||||
}
|
||||
|
||||
return runRepoSetup(cwd, repoDryRun)
|
||||
},
|
||||
}
|
||||
|
||||
repoCmd.Flags().BoolVar(&repoDryRun, "dry-run", false, i18n.T("cmd.setup.flag.dry_run"))
|
||||
|
||||
parent.AddCommand(repoCmd)
|
||||
}
|
||||
|
||||
// runRepoSetup sets up the current repository with .core/ configuration.
|
||||
func runRepoSetup(repoPath string, dryRun bool) error {
|
||||
fmt.Printf("%s %s: %s\n", dimStyle.Render(">>"), i18n.T("cmd.setup.repo.setting_up"), repoPath)
|
||||
|
|
@ -73,12 +100,12 @@ func detectProjectType(path string) string {
|
|||
if coreio.Local.IsFile(filepath.Join(path, "go.mod")) {
|
||||
return "go"
|
||||
}
|
||||
if coreio.Local.IsFile(filepath.Join(path, "composer.json")) {
|
||||
return "php"
|
||||
}
|
||||
if coreio.Local.IsFile(filepath.Join(path, "package.json")) {
|
||||
return "node"
|
||||
}
|
||||
if coreio.Local.IsFile(filepath.Join(path, "composer.json")) {
|
||||
return "php"
|
||||
}
|
||||
return "unknown"
|
||||
}
|
||||
|
||||
|
|
@ -268,23 +295,46 @@ func detectGitHubRepo() string {
|
|||
return ""
|
||||
}
|
||||
|
||||
url := strings.TrimSpace(string(output))
|
||||
return parseGitHubRepoURL(strings.TrimSpace(string(output)))
|
||||
}
|
||||
|
||||
// Handle SSH format: git@github.com:owner/repo.git
|
||||
if strings.HasPrefix(url, "git@github.com:") {
|
||||
repo := strings.TrimPrefix(url, "git@github.com:")
|
||||
repo = strings.TrimSuffix(repo, ".git")
|
||||
return repo
|
||||
// parseGitHubRepoURL extracts owner/repo from a GitHub remote URL.
|
||||
//
|
||||
// Supports the common remote formats used by git:
|
||||
// - git@github.com:owner/repo.git
|
||||
// - ssh://git@github.com/owner/repo.git
|
||||
// - https://github.com/owner/repo.git
|
||||
// - git://github.com/owner/repo.git
|
||||
func parseGitHubRepoURL(remote string) string {
|
||||
remote = strings.TrimSpace(remote)
|
||||
if remote == "" {
|
||||
return ""
|
||||
}
|
||||
|
||||
// Handle HTTPS format: https://github.com/owner/repo.git
|
||||
if strings.Contains(url, "github.com/") {
|
||||
parts := strings.Split(url, "github.com/")
|
||||
if len(parts) == 2 {
|
||||
repo := strings.TrimSuffix(parts[1], ".git")
|
||||
// Handle SSH-style scp syntax first.
|
||||
if strings.HasPrefix(remote, "git@github.com:") {
|
||||
repo := strings.TrimPrefix(remote, "git@github.com:")
|
||||
return strings.TrimSuffix(repo, ".git")
|
||||
}
|
||||
|
||||
if parsed, err := url.Parse(remote); err == nil && parsed.Host != "" {
|
||||
host := strings.TrimPrefix(parsed.Hostname(), "www.")
|
||||
if host == "github.com" {
|
||||
repo := strings.TrimPrefix(parsed.Path, "/")
|
||||
repo = strings.TrimSuffix(repo, ".git")
|
||||
repo = strings.TrimSuffix(repo, "/")
|
||||
return repo
|
||||
}
|
||||
}
|
||||
|
||||
if strings.Contains(remote, "github.com/") {
|
||||
parts := strings.SplitN(remote, "github.com/", 2)
|
||||
if len(parts) == 2 {
|
||||
repo := strings.TrimPrefix(parts[1], "/")
|
||||
repo = strings.TrimSuffix(repo, ".git")
|
||||
return strings.TrimSuffix(repo, "/")
|
||||
}
|
||||
}
|
||||
|
||||
return ""
|
||||
}
|
||||
|
|
|
|||
52
cmd/setup/cmd_repo_test.go
Normal file
52
cmd/setup/cmd_repo_test.go
Normal file
|
|
@ -0,0 +1,52 @@
|
|||
package setup
|
||||
|
||||
import (
|
||||
"os"
|
||||
"path/filepath"
|
||||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/require"
|
||||
)
|
||||
|
||||
func TestRunRepoSetup_CreatesCoreConfigs(t *testing.T) {
|
||||
dir := t.TempDir()
|
||||
require.NoError(t, os.WriteFile(filepath.Join(dir, "go.mod"), []byte("module example.com/test\n"), 0o644))
|
||||
|
||||
require.NoError(t, runRepoSetup(dir, false))
|
||||
|
||||
for _, name := range []string{"build.yaml", "release.yaml", "test.yaml"} {
|
||||
path := filepath.Join(dir, ".core", name)
|
||||
_, err := os.Stat(path)
|
||||
require.NoErrorf(t, err, "expected %s to exist", path)
|
||||
}
|
||||
}
|
||||
|
||||
func TestDetectProjectType_PrefersPackageOverComposer(t *testing.T) {
|
||||
dir := t.TempDir()
|
||||
require.NoError(t, os.WriteFile(filepath.Join(dir, "package.json"), []byte("{}\n"), 0o644))
|
||||
require.NoError(t, os.WriteFile(filepath.Join(dir, "composer.json"), []byte("{}\n"), 0o644))
|
||||
|
||||
require.Equal(t, "node", detectProjectType(dir))
|
||||
}
|
||||
|
||||
func TestParseGitHubRepoURL_Good(t *testing.T) {
|
||||
cases := map[string]string{
|
||||
"git@github.com:owner/repo.git": "owner/repo",
|
||||
"ssh://git@github.com/owner/repo.git": "owner/repo",
|
||||
"https://github.com/owner/repo.git": "owner/repo",
|
||||
"git://github.com/owner/repo.git": "owner/repo",
|
||||
"https://www.github.com/owner/repo": "owner/repo",
|
||||
"git@github.com:owner/nested/repo.git": "owner/nested/repo",
|
||||
"ssh://git@github.com/owner/nested/repo/": "owner/nested/repo",
|
||||
"ssh://git@github.com:443/owner/repo.git": "owner/repo",
|
||||
"https://example.com/owner/repo.git": "",
|
||||
"git@bitbucket.org:owner/repo.git": "",
|
||||
" ssh://git@github.com/owner/repo.git ": "owner/repo",
|
||||
}
|
||||
|
||||
for remote, expected := range cases {
|
||||
t.Run(remote, func(t *testing.T) {
|
||||
require.Equal(t, expected, parseGitHubRepoURL(remote))
|
||||
})
|
||||
}
|
||||
}
|
||||
|
|
@ -2,8 +2,8 @@
|
|||
package setup
|
||||
|
||||
import (
|
||||
"forge.lthn.ai/core/cli/pkg/cli"
|
||||
"dappco.re/go/core/i18n"
|
||||
"forge.lthn.ai/core/cli/pkg/cli"
|
||||
)
|
||||
|
||||
// Style aliases from shared package
|
||||
|
|
@ -51,6 +51,7 @@ func initSetupFlags() {
|
|||
// AddSetupCommand adds the 'setup' command to the given parent command.
|
||||
func AddSetupCommand(root *cli.Command) {
|
||||
initSetupFlags()
|
||||
addRepoCommand(setupCmd)
|
||||
addGitHubCommand(setupCmd)
|
||||
root.AddCommand(setupCmd)
|
||||
}
|
||||
|
|
|
|||
|
|
@ -7,9 +7,9 @@ import (
|
|||
"os"
|
||||
"slices"
|
||||
|
||||
"forge.lthn.ai/core/cli/pkg/cli"
|
||||
"dappco.re/go/core/i18n"
|
||||
"dappco.re/go/core/scm/repos"
|
||||
"forge.lthn.ai/core/cli/pkg/cli"
|
||||
"golang.org/x/term"
|
||||
)
|
||||
|
||||
|
|
@ -39,6 +39,9 @@ func promptProjectName(defaultName string) (string, error) {
|
|||
// runPackageWizard presents an interactive multi-select UI for package selection.
|
||||
func runPackageWizard(reg *repos.Registry, preselectedTypes []string) ([]string, error) {
|
||||
allRepos := reg.List()
|
||||
if len(preselectedTypes) > 0 {
|
||||
allRepos = filterReposByTypes(allRepos, preselectedTypes)
|
||||
}
|
||||
|
||||
// Build options
|
||||
var options []string
|
||||
|
|
@ -57,6 +60,10 @@ func runPackageWizard(reg *repos.Registry, preselectedTypes []string) ([]string,
|
|||
options = append(options, label)
|
||||
}
|
||||
|
||||
if len(options) == 0 {
|
||||
return nil, nil
|
||||
}
|
||||
|
||||
fmt.Println(cli.TitleStyle.Render(i18n.T("cmd.setup.wizard.package_selection")))
|
||||
fmt.Println(i18n.T("cmd.setup.wizard.selection_hint"))
|
||||
|
||||
|
|
@ -87,6 +94,33 @@ func runPackageWizard(reg *repos.Registry, preselectedTypes []string) ([]string,
|
|||
return selected, nil
|
||||
}
|
||||
|
||||
func filterReposByTypes(repoList []*repos.Repo, allowedTypes []string) []*repos.Repo {
|
||||
if len(allowedTypes) == 0 {
|
||||
return repoList
|
||||
}
|
||||
|
||||
allowed := make(map[string]struct{}, len(allowedTypes))
|
||||
for _, repoType := range allowedTypes {
|
||||
if repoType == "" {
|
||||
continue
|
||||
}
|
||||
allowed[repoType] = struct{}{}
|
||||
}
|
||||
|
||||
if len(allowed) == 0 {
|
||||
return repoList
|
||||
}
|
||||
|
||||
filtered := make([]*repos.Repo, 0, len(repoList))
|
||||
for _, repo := range repoList {
|
||||
if _, ok := allowed[repo.Type]; ok {
|
||||
filtered = append(filtered, repo)
|
||||
}
|
||||
}
|
||||
|
||||
return filtered
|
||||
}
|
||||
|
||||
// confirmClone asks for confirmation before cloning.
|
||||
func confirmClone(count int, target string) (bool, error) {
|
||||
confirmed := cli.Confirm(i18n.T("cmd.setup.wizard.confirm_clone", map[string]any{"Count": count, "Target": target}))
|
||||
|
|
|
|||
34
cmd/setup/cmd_wizard_test.go
Normal file
34
cmd/setup/cmd_wizard_test.go
Normal file
|
|
@ -0,0 +1,34 @@
|
|||
package setup
|
||||
|
||||
import (
|
||||
"testing"
|
||||
|
||||
"dappco.re/go/core/scm/repos"
|
||||
"github.com/stretchr/testify/require"
|
||||
)
|
||||
|
||||
func TestFilterReposByTypes_Good(t *testing.T) {
|
||||
reposList := []*repos.Repo{
|
||||
{Name: "foundation-a", Type: "foundation"},
|
||||
{Name: "module-a", Type: "module"},
|
||||
{Name: "product-a", Type: "product"},
|
||||
}
|
||||
|
||||
filtered := filterReposByTypes(reposList, []string{"module", "product"})
|
||||
|
||||
require.Len(t, filtered, 2)
|
||||
require.Equal(t, "module-a", filtered[0].Name)
|
||||
require.Equal(t, "product-a", filtered[1].Name)
|
||||
}
|
||||
|
||||
func TestFilterReposByTypes_EmptyFilter_Good(t *testing.T) {
|
||||
reposList := []*repos.Repo{
|
||||
{Name: "foundation-a", Type: "foundation"},
|
||||
{Name: "module-a", Type: "module"},
|
||||
}
|
||||
|
||||
filtered := filterReposByTypes(reposList, nil)
|
||||
|
||||
require.Len(t, filtered, 2)
|
||||
require.Equal(t, reposList, filtered)
|
||||
}
|
||||
323
devkit/coverage.go
Normal file
323
devkit/coverage.go
Normal file
|
|
@ -0,0 +1,323 @@
|
|||
package devkit
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"os"
|
||||
"path"
|
||||
"path/filepath"
|
||||
"regexp"
|
||||
"sort"
|
||||
"strconv"
|
||||
"strings"
|
||||
"time"
|
||||
)
|
||||
|
||||
// CoveragePackage describes coverage for a single package or directory.
|
||||
type CoveragePackage struct {
|
||||
Name string `json:"name"`
|
||||
CoveredStatements int `json:"covered_statements"`
|
||||
TotalStatements int `json:"total_statements"`
|
||||
Coverage float64 `json:"coverage"`
|
||||
}
|
||||
|
||||
// CoverageSnapshot captures a point-in-time view of coverage across packages.
|
||||
type CoverageSnapshot struct {
|
||||
CapturedAt time.Time `json:"captured_at"`
|
||||
Packages []CoveragePackage `json:"packages"`
|
||||
Total CoveragePackage `json:"total"`
|
||||
}
|
||||
|
||||
// CoverageDelta describes how a single package changed between snapshots.
|
||||
type CoverageDelta struct {
|
||||
Name string `json:"name"`
|
||||
Previous float64 `json:"previous"`
|
||||
Current float64 `json:"current"`
|
||||
Delta float64 `json:"delta"`
|
||||
}
|
||||
|
||||
// CoverageComparison summarises the differences between two coverage snapshots.
|
||||
type CoverageComparison struct {
|
||||
Regressions []CoverageDelta `json:"regressions"`
|
||||
Improvements []CoverageDelta `json:"improvements"`
|
||||
NewPackages []CoveragePackage `json:"new_packages"`
|
||||
Removed []CoveragePackage `json:"removed"`
|
||||
TotalDelta float64 `json:"total_delta"`
|
||||
}
|
||||
|
||||
// CoverageStore persists coverage snapshots to disk.
|
||||
type CoverageStore struct {
|
||||
path string
|
||||
}
|
||||
|
||||
type coverageBucket struct {
|
||||
covered int
|
||||
total int
|
||||
}
|
||||
|
||||
var coverProfileLineRE = regexp.MustCompile(`^(.+?):\d+\.\d+,\d+\.\d+\s+(\d+)\s+(\d+)$`)
|
||||
var coverOutputLineRE = regexp.MustCompile(`^(?:ok|\?)?\s*(\S+)\s+.*coverage:\s+([0-9]+(?:\.[0-9]+)?)% of statements$`)
|
||||
|
||||
// NewCoverageStore creates a store backed by the given file path.
|
||||
func NewCoverageStore(path string) *CoverageStore {
|
||||
return &CoverageStore{path: path}
|
||||
}
|
||||
|
||||
// Append stores a new snapshot, creating the parent directory if needed.
|
||||
func (s *CoverageStore) Append(snapshot CoverageSnapshot) error {
|
||||
if err := os.MkdirAll(filepath.Dir(s.path), 0o755); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
snapshots, err := s.Load()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
snapshot.CapturedAt = snapshot.CapturedAt.UTC()
|
||||
snapshots = append(snapshots, snapshot)
|
||||
|
||||
data, err := json.MarshalIndent(snapshots, "", " ")
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return os.WriteFile(s.path, data, 0o600)
|
||||
}
|
||||
|
||||
// Load reads all snapshots from disk.
|
||||
func (s *CoverageStore) Load() ([]CoverageSnapshot, error) {
|
||||
data, err := os.ReadFile(s.path)
|
||||
if err != nil {
|
||||
if os.IsNotExist(err) {
|
||||
return nil, nil
|
||||
}
|
||||
return nil, err
|
||||
}
|
||||
if len(strings.TrimSpace(string(data))) == 0 {
|
||||
return nil, nil
|
||||
}
|
||||
|
||||
var snapshots []CoverageSnapshot
|
||||
if err := json.Unmarshal(data, &snapshots); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return snapshots, nil
|
||||
}
|
||||
|
||||
// Latest returns the newest snapshot in the store.
|
||||
func (s *CoverageStore) Latest() (CoverageSnapshot, error) {
|
||||
snapshots, err := s.Load()
|
||||
if err != nil {
|
||||
return CoverageSnapshot{}, err
|
||||
}
|
||||
if len(snapshots) == 0 {
|
||||
return CoverageSnapshot{}, fmt.Errorf("coverage store is empty")
|
||||
}
|
||||
return snapshots[len(snapshots)-1], nil
|
||||
}
|
||||
|
||||
// ParseCoverProfile parses go test -coverprofile output into a coverage snapshot.
|
||||
func ParseCoverProfile(data string) (CoverageSnapshot, error) {
|
||||
if strings.TrimSpace(data) == "" {
|
||||
return CoverageSnapshot{}, nil
|
||||
}
|
||||
|
||||
packages := make(map[string]*coverageBucket)
|
||||
total := coverageBucket{}
|
||||
|
||||
for _, rawLine := range strings.Split(strings.TrimSpace(data), "\n") {
|
||||
line := strings.TrimSpace(rawLine)
|
||||
if line == "" || strings.HasPrefix(line, "mode:") {
|
||||
continue
|
||||
}
|
||||
|
||||
match := coverProfileLineRE.FindStringSubmatch(line)
|
||||
if match == nil {
|
||||
return CoverageSnapshot{}, fmt.Errorf("invalid cover profile line: %s", line)
|
||||
}
|
||||
|
||||
file := filepath.ToSlash(match[1])
|
||||
stmts, err := strconv.Atoi(match[2])
|
||||
if err != nil {
|
||||
return CoverageSnapshot{}, err
|
||||
}
|
||||
count, err := strconv.Atoi(match[3])
|
||||
if err != nil {
|
||||
return CoverageSnapshot{}, err
|
||||
}
|
||||
|
||||
dir := path.Dir(file)
|
||||
if dir == "" {
|
||||
dir = "."
|
||||
}
|
||||
|
||||
b := packages[dir]
|
||||
if b == nil {
|
||||
b = &coverageBucket{}
|
||||
packages[dir] = b
|
||||
}
|
||||
b.total += stmts
|
||||
total.total += stmts
|
||||
if count > 0 {
|
||||
b.covered += stmts
|
||||
total.covered += stmts
|
||||
}
|
||||
}
|
||||
|
||||
return snapshotFromBuckets(packages, total), nil
|
||||
}
|
||||
|
||||
// ParseCoverOutput parses human-readable go test -cover output into a snapshot.
|
||||
func ParseCoverOutput(output string) (CoverageSnapshot, error) {
|
||||
if strings.TrimSpace(output) == "" {
|
||||
return CoverageSnapshot{}, nil
|
||||
}
|
||||
|
||||
packages := make(map[string]*CoveragePackage)
|
||||
var total CoveragePackage
|
||||
|
||||
for _, rawLine := range strings.Split(strings.TrimSpace(output), "\n") {
|
||||
line := strings.TrimSpace(rawLine)
|
||||
if line == "" {
|
||||
continue
|
||||
}
|
||||
|
||||
match := coverOutputLineRE.FindStringSubmatch(line)
|
||||
if match == nil {
|
||||
continue
|
||||
}
|
||||
|
||||
name := match[1]
|
||||
coverage, err := strconv.ParseFloat(match[2], 64)
|
||||
if err != nil {
|
||||
return CoverageSnapshot{}, err
|
||||
}
|
||||
|
||||
pkg := &CoveragePackage{
|
||||
Name: name,
|
||||
Coverage: coverage,
|
||||
}
|
||||
packages[name] = pkg
|
||||
|
||||
total.Coverage += coverage
|
||||
total.TotalStatements++
|
||||
}
|
||||
|
||||
if len(packages) == 0 {
|
||||
return CoverageSnapshot{}, nil
|
||||
}
|
||||
|
||||
snapshot := CoverageSnapshot{
|
||||
CapturedAt: time.Now().UTC(),
|
||||
Packages: make([]CoveragePackage, 0, len(packages)),
|
||||
}
|
||||
|
||||
for _, pkg := range packages {
|
||||
snapshot.Packages = append(snapshot.Packages, *pkg)
|
||||
}
|
||||
sort.Slice(snapshot.Packages, func(i, j int) bool {
|
||||
return snapshot.Packages[i].Name < snapshot.Packages[j].Name
|
||||
})
|
||||
|
||||
snapshot.Total.Name = "total"
|
||||
if total.TotalStatements > 0 {
|
||||
snapshot.Total.Coverage = total.Coverage / float64(total.TotalStatements)
|
||||
}
|
||||
return snapshot, nil
|
||||
}
|
||||
|
||||
// CompareCoverage compares two snapshots and reports regressions and improvements.
|
||||
func CompareCoverage(previous, current CoverageSnapshot) CoverageComparison {
|
||||
prevPackages := coverageMap(previous.Packages)
|
||||
currPackages := coverageMap(current.Packages)
|
||||
|
||||
comparison := CoverageComparison{
|
||||
NewPackages: make([]CoveragePackage, 0),
|
||||
Removed: make([]CoveragePackage, 0),
|
||||
}
|
||||
|
||||
for name, curr := range currPackages {
|
||||
prev, ok := prevPackages[name]
|
||||
if !ok {
|
||||
comparison.NewPackages = append(comparison.NewPackages, curr)
|
||||
continue
|
||||
}
|
||||
|
||||
delta := curr.Coverage - prev.Coverage
|
||||
change := CoverageDelta{
|
||||
Name: name,
|
||||
Previous: prev.Coverage,
|
||||
Current: curr.Coverage,
|
||||
Delta: delta,
|
||||
}
|
||||
if delta < 0 {
|
||||
comparison.Regressions = append(comparison.Regressions, change)
|
||||
} else if delta > 0 {
|
||||
comparison.Improvements = append(comparison.Improvements, change)
|
||||
}
|
||||
}
|
||||
|
||||
for name, prev := range prevPackages {
|
||||
if _, ok := currPackages[name]; !ok {
|
||||
comparison.Removed = append(comparison.Removed, prev)
|
||||
}
|
||||
}
|
||||
|
||||
sortCoverageComparison(&comparison)
|
||||
comparison.TotalDelta = current.Total.Coverage - previous.Total.Coverage
|
||||
return comparison
|
||||
}
|
||||
|
||||
func snapshotFromBuckets(packages map[string]*coverageBucket, total coverageBucket) CoverageSnapshot {
|
||||
snapshot := CoverageSnapshot{
|
||||
CapturedAt: time.Now().UTC(),
|
||||
Packages: make([]CoveragePackage, 0, len(packages)),
|
||||
}
|
||||
|
||||
for name, b := range packages {
|
||||
snapshot.Packages = append(snapshot.Packages, coverageAverage(name, b.covered, b.total))
|
||||
}
|
||||
|
||||
sort.Slice(snapshot.Packages, func(i, j int) bool {
|
||||
return snapshot.Packages[i].Name < snapshot.Packages[j].Name
|
||||
})
|
||||
|
||||
snapshot.Total = coverageAverage("total", total.covered, total.total)
|
||||
return snapshot
|
||||
}
|
||||
|
||||
func coverageAverage(name string, covered, total int) CoveragePackage {
|
||||
pkg := CoveragePackage{
|
||||
Name: name,
|
||||
CoveredStatements: covered,
|
||||
TotalStatements: total,
|
||||
}
|
||||
if total > 0 {
|
||||
pkg.Coverage = float64(covered) / float64(total) * 100
|
||||
}
|
||||
return pkg
|
||||
}
|
||||
|
||||
func coverageMap(packages []CoveragePackage) map[string]CoveragePackage {
|
||||
result := make(map[string]CoveragePackage, len(packages))
|
||||
for _, pkg := range packages {
|
||||
result[pkg.Name] = pkg
|
||||
}
|
||||
return result
|
||||
}
|
||||
|
||||
func sortCoverageComparison(comparison *CoverageComparison) {
|
||||
sort.Slice(comparison.Regressions, func(i, j int) bool {
|
||||
return comparison.Regressions[i].Name < comparison.Regressions[j].Name
|
||||
})
|
||||
sort.Slice(comparison.Improvements, func(i, j int) bool {
|
||||
return comparison.Improvements[i].Name < comparison.Improvements[j].Name
|
||||
})
|
||||
sort.Slice(comparison.NewPackages, func(i, j int) bool {
|
||||
return comparison.NewPackages[i].Name < comparison.NewPackages[j].Name
|
||||
})
|
||||
sort.Slice(comparison.Removed, func(i, j int) bool {
|
||||
return comparison.Removed[i].Name < comparison.Removed[j].Name
|
||||
})
|
||||
}
|
||||
108
devkit/coverage_test.go
Normal file
108
devkit/coverage_test.go
Normal file
|
|
@ -0,0 +1,108 @@
|
|||
package devkit
|
||||
|
||||
import (
|
||||
"os"
|
||||
"path/filepath"
|
||||
"testing"
|
||||
"time"
|
||||
|
||||
"github.com/stretchr/testify/require"
|
||||
)
|
||||
|
||||
func TestParseCoverProfile_Good(t *testing.T) {
|
||||
snapshot, err := ParseCoverProfile(`mode: set
|
||||
github.com/acme/project/foo/foo.go:1.1,3.1 2 1
|
||||
github.com/acme/project/foo/bar.go:1.1,4.1 3 0
|
||||
github.com/acme/project/baz/baz.go:1.1,2.1 4 4
|
||||
`)
|
||||
require.NoError(t, err)
|
||||
require.Len(t, snapshot.Packages, 2)
|
||||
require.Equal(t, "github.com/acme/project/baz", snapshot.Packages[0].Name)
|
||||
require.Equal(t, "github.com/acme/project/foo", snapshot.Packages[1].Name)
|
||||
require.InDelta(t, 100.0, snapshot.Packages[0].Coverage, 0.0001)
|
||||
require.InDelta(t, 40.0, snapshot.Packages[1].Coverage, 0.0001)
|
||||
require.InDelta(t, 66.6667, snapshot.Total.Coverage, 0.0001)
|
||||
}
|
||||
|
||||
func TestParseCoverProfile_Bad(t *testing.T) {
|
||||
_, err := ParseCoverProfile("mode: set\nbroken line")
|
||||
require.Error(t, err)
|
||||
}
|
||||
|
||||
func TestParseCoverOutput_Good(t *testing.T) {
|
||||
snapshot, err := ParseCoverOutput(`ok github.com/acme/project/foo 0.123s coverage: 75.0% of statements
|
||||
ok github.com/acme/project/bar 0.456s coverage: 50.0% of statements
|
||||
`)
|
||||
require.NoError(t, err)
|
||||
require.Len(t, snapshot.Packages, 2)
|
||||
require.Equal(t, "github.com/acme/project/bar", snapshot.Packages[0].Name)
|
||||
require.Equal(t, "github.com/acme/project/foo", snapshot.Packages[1].Name)
|
||||
require.InDelta(t, 62.5, snapshot.Total.Coverage, 0.0001)
|
||||
}
|
||||
|
||||
func TestCompareCoverage_Good(t *testing.T) {
|
||||
previous := CoverageSnapshot{
|
||||
Packages: []CoveragePackage{
|
||||
{Name: "pkg/a", Coverage: 90.0},
|
||||
{Name: "pkg/b", Coverage: 80.0},
|
||||
},
|
||||
Total: CoveragePackage{Name: "total", Coverage: 85.0},
|
||||
}
|
||||
current := CoverageSnapshot{
|
||||
Packages: []CoveragePackage{
|
||||
{Name: "pkg/a", Coverage: 87.5},
|
||||
{Name: "pkg/b", Coverage: 82.0},
|
||||
{Name: "pkg/c", Coverage: 100.0},
|
||||
},
|
||||
Total: CoveragePackage{Name: "total", Coverage: 89.0},
|
||||
}
|
||||
|
||||
comparison := CompareCoverage(previous, current)
|
||||
require.Len(t, comparison.Regressions, 1)
|
||||
require.Len(t, comparison.Improvements, 1)
|
||||
require.Len(t, comparison.NewPackages, 1)
|
||||
require.Empty(t, comparison.Removed)
|
||||
require.Equal(t, "pkg/a", comparison.Regressions[0].Name)
|
||||
require.Equal(t, "pkg/b", comparison.Improvements[0].Name)
|
||||
require.Equal(t, "pkg/c", comparison.NewPackages[0].Name)
|
||||
require.InDelta(t, 4.0, comparison.TotalDelta, 0.0001)
|
||||
}
|
||||
|
||||
func TestCoverageStore_Good(t *testing.T) {
|
||||
dir := t.TempDir()
|
||||
store := NewCoverageStore(filepath.Join(dir, "coverage.json"))
|
||||
|
||||
first := CoverageSnapshot{
|
||||
CapturedAt: time.Date(2026, 4, 1, 10, 0, 0, 0, time.UTC),
|
||||
Packages: []CoveragePackage{{Name: "pkg/a", Coverage: 80.0}},
|
||||
Total: CoveragePackage{Name: "total", Coverage: 80.0},
|
||||
}
|
||||
second := CoverageSnapshot{
|
||||
CapturedAt: time.Date(2026, 4, 1, 11, 0, 0, 0, time.UTC),
|
||||
Packages: []CoveragePackage{{Name: "pkg/a", Coverage: 82.5}},
|
||||
Total: CoveragePackage{Name: "total", Coverage: 82.5},
|
||||
}
|
||||
|
||||
require.NoError(t, store.Append(first))
|
||||
require.NoError(t, store.Append(second))
|
||||
|
||||
snapshots, err := store.Load()
|
||||
require.NoError(t, err)
|
||||
require.Len(t, snapshots, 2)
|
||||
require.Equal(t, first.CapturedAt, snapshots[0].CapturedAt)
|
||||
require.Equal(t, second.CapturedAt, snapshots[1].CapturedAt)
|
||||
|
||||
latest, err := store.Latest()
|
||||
require.NoError(t, err)
|
||||
require.Equal(t, second.CapturedAt, latest.CapturedAt)
|
||||
}
|
||||
|
||||
func TestCoverageStore_Bad(t *testing.T) {
|
||||
dir := t.TempDir()
|
||||
path := filepath.Join(dir, "coverage.json")
|
||||
require.NoError(t, os.WriteFile(path, []byte("{"), 0o600))
|
||||
|
||||
store := NewCoverageStore(path)
|
||||
_, err := store.Load()
|
||||
require.Error(t, err)
|
||||
}
|
||||
109
devkit/scan_secrets.go
Normal file
109
devkit/scan_secrets.go
Normal file
|
|
@ -0,0 +1,109 @@
|
|||
package devkit
|
||||
|
||||
import (
|
||||
"context"
|
||||
"encoding/csv"
|
||||
"os/exec"
|
||||
"strconv"
|
||||
"strings"
|
||||
)
|
||||
|
||||
var scanSecretsRunner = runGitleaksDetect
|
||||
|
||||
// ScanSecrets runs gitleaks against the supplied directory and parses the CSV report.
|
||||
func ScanSecrets(dir string) ([]Finding, error) {
|
||||
output, err := scanSecretsRunner(dir)
|
||||
findings, parseErr := parseGitleaksCSV(output)
|
||||
if parseErr != nil {
|
||||
return nil, parseErr
|
||||
}
|
||||
if err != nil && len(findings) == 0 {
|
||||
return nil, err
|
||||
}
|
||||
return findings, nil
|
||||
}
|
||||
|
||||
func runGitleaksDetect(dir string) ([]byte, error) {
|
||||
bin, err := exec.LookPath("gitleaks")
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
cmd := exec.CommandContext(context.Background(), bin,
|
||||
"detect",
|
||||
"--no-banner",
|
||||
"--no-color",
|
||||
"--no-git",
|
||||
"--source", dir,
|
||||
"--report-format", "csv",
|
||||
"--report-path", "-",
|
||||
)
|
||||
|
||||
return cmd.Output()
|
||||
}
|
||||
|
||||
func parseGitleaksCSV(data []byte) ([]Finding, error) {
|
||||
if len(data) == 0 {
|
||||
return nil, nil
|
||||
}
|
||||
|
||||
reader := csv.NewReader(strings.NewReader(string(data)))
|
||||
reader.FieldsPerRecord = -1
|
||||
|
||||
rows, err := reader.ReadAll()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if len(rows) == 0 {
|
||||
return nil, nil
|
||||
}
|
||||
|
||||
header := make(map[string]int, len(rows[0]))
|
||||
for idx, name := range rows[0] {
|
||||
header[normalizeCSVHeader(name)] = idx
|
||||
}
|
||||
|
||||
var findings []Finding
|
||||
for _, row := range rows[1:] {
|
||||
finding := Finding{
|
||||
Path: csvField(row, header, "file", "path"),
|
||||
Line: csvIntField(row, header, "startline", "line"),
|
||||
Column: csvIntField(row, header, "startcolumn", "column"),
|
||||
Rule: csvField(row, header, "ruleid", "rule", "name"),
|
||||
Snippet: csvField(row, header, "match", "secret", "description", "message"),
|
||||
}
|
||||
|
||||
if finding.Snippet == "" {
|
||||
finding.Snippet = csvField(row, header, "filename")
|
||||
}
|
||||
findings = append(findings, finding)
|
||||
}
|
||||
|
||||
return findings, nil
|
||||
}
|
||||
|
||||
// normalizeCSVHeader lowercases a header cell and strips underscores and
// spaces so variants like "Start_Line", "StartLine", and "start line" all
// compare equal.
func normalizeCSVHeader(name string) string {
	cleaned := strings.ReplaceAll(name, "_", "")
	cleaned = strings.ReplaceAll(cleaned, " ", "")
	return strings.ToLower(strings.TrimSpace(cleaned))
}
|
||||
|
||||
// csvField returns the first present cell among the candidate header names,
// trimmed of surrounding whitespace. Unknown names, or indices beyond the row
// width, yield the empty string.
func csvField(row []string, header map[string]int, names ...string) string {
	for _, key := range names {
		idx, ok := header[key]
		if !ok || idx >= len(row) {
			continue
		}
		return strings.TrimSpace(row[idx])
	}
	return ""
}
|
||||
|
||||
func csvIntField(row []string, header map[string]int, names ...string) int {
|
||||
value := csvField(row, header, names...)
|
||||
if value == "" {
|
||||
return 0
|
||||
}
|
||||
|
||||
n, err := strconv.Atoi(value)
|
||||
if err != nil {
|
||||
return 0
|
||||
}
|
||||
return n
|
||||
}
|
||||
64
devkit/scan_secrets_test.go
Normal file
64
devkit/scan_secrets_test.go
Normal file
|
|
@ -0,0 +1,64 @@
|
|||
package devkit
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/require"
|
||||
)
|
||||
|
||||
// TestScanSecrets_Good stubs the gitleaks runner with a well-formed CSV report
// and checks that both rows are parsed into findings in order, with path,
// line, column, rule, and snippet populated from the expected columns.
func TestScanSecrets_Good(t *testing.T) {
	// Swap in a fake runner; restore the real one when the test finishes.
	originalRunner := scanSecretsRunner
	t.Cleanup(func() {
		scanSecretsRunner = originalRunner
	})

	scanSecretsRunner = func(dir string) ([]byte, error) {
		require.Equal(t, "/tmp/project", dir)
		return []byte(`RuleID,File,StartLine,StartColumn,Description,Match
github-token,config.yml,12,4,GitHub token detected,ghp_exampletoken1234567890
aws-access-key-id,creds.txt,7,1,AWS access key detected,AKIA1234567890ABCDEF
`), nil
	}

	findings, err := ScanSecrets("/tmp/project")
	require.NoError(t, err)
	require.Len(t, findings, 2)

	require.Equal(t, "github-token", findings[0].Rule)
	require.Equal(t, "config.yml", findings[0].Path)
	require.Equal(t, 12, findings[0].Line)
	require.Equal(t, 4, findings[0].Column)
	require.Equal(t, "ghp_exampletoken1234567890", findings[0].Snippet)

	require.Equal(t, "aws-access-key-id", findings[1].Rule)
	require.Equal(t, "creds.txt", findings[1].Path)
	require.Equal(t, 7, findings[1].Line)
	require.Equal(t, 1, findings[1].Column)
	require.Equal(t, "AKIA1234567890ABCDEF", findings[1].Snippet)
}
|
||||
|
||||
// TestScanSecrets_ReportsFindingsOnExitError verifies that a non-zero exit
// from the runner does not mask parsed findings: when the report contains
// rows, ScanSecrets returns them and drops the runner error. It also exercises
// snake_case header normalisation (rule_id, start_line, ...).
func TestScanSecrets_ReportsFindingsOnExitError(t *testing.T) {
	originalRunner := scanSecretsRunner
	t.Cleanup(func() {
		scanSecretsRunner = originalRunner
	})

	// The stub returns both a report and an error, mimicking a gitleaks
	// "exit status 1" when findings are present.
	scanSecretsRunner = func(dir string) ([]byte, error) {
		return []byte(`rule_id,file,start_line,start_column,description,match
token,test.txt,3,2,Token detected,secret-value
`), errors.New("exit status 1")
	}

	findings, err := ScanSecrets("/tmp/project")
	require.NoError(t, err)
	require.Len(t, findings, 1)
	require.Equal(t, "token", findings[0].Rule)
	require.Equal(t, 3, findings[0].Line)
	require.Equal(t, 2, findings[0].Column)
}
|
||||
|
||||
func TestParseGitleaksCSV_Bad(t *testing.T) {
|
||||
_, err := parseGitleaksCSV([]byte("rule_id,file,start_line\nunterminated,\"broken"))
|
||||
require.Error(t, err)
|
||||
}
|
||||
154
devkit/secret.go
Normal file
154
devkit/secret.go
Normal file
|
|
@ -0,0 +1,154 @@
|
|||
package devkit
|
||||
|
||||
import (
|
||||
"bufio"
|
||||
"bytes"
|
||||
"io/fs"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"regexp"
|
||||
"strings"
|
||||
)
|
||||
|
||||
// Finding describes a secret-like match discovered while scanning source files.
type Finding struct {
	Path    string `json:"path"`    // file path where the match occurred
	Line    int    `json:"line"`    // 1-based line number of the match
	Column  int    `json:"column"`  // 1-based column where the match starts
	Rule    string `json:"rule"`    // identifier of the rule that matched
	Snippet string `json:"snippet"` // matched text, or the offending line trimmed of whitespace
}
|
||||
|
||||
// secretRules are the built-in detection patterns, evaluated in order for each
// line. The generic assignment rule is a catch-all; scanFile suppresses it on
// lines where a more specific rule already matched, avoiding duplicates.
var secretRules = []struct {
	name  string
	match *regexp.Regexp
}{
	{
		// AWS access key IDs: "AKIA" followed by 16 uppercase alphanumerics.
		name:  "aws-access-key-id",
		match: regexp.MustCompile(`\bAKIA[0-9A-Z]{16}\b`),
	},
	{
		// GitHub tokens: ghp_/gho_/ghu_/ghs_/ghr_ prefix plus 20+ word chars.
		name:  "github-token",
		match: regexp.MustCompile(`\bgh[pousr]_[A-Za-z0-9_]{20,}\b`),
	},
	{
		// key/secret/token/password assigned a value of 8+ token characters,
		// case-insensitive, with optional quoting.
		name:  "generic-secret-assignment",
		match: regexp.MustCompile(`(?i)\b(?:api[_-]?key|client[_-]?secret|secret|token|password)\b\s*[:=]\s*["']?([A-Za-z0-9._\-+/]{8,})["']?`),
	},
}
|
||||
|
||||
// skipDirs lists directory names that ScanDir never descends into.
var skipDirs = map[string]struct{}{
	".git":         {},
	"vendor":       {},
	"node_modules": {},
}

// textExts lists file extensions treated as scannable text by isTextCandidate.
var textExts = map[string]struct{}{
	".go":     {},
	".md":     {},
	".txt":    {},
	".json":   {},
	".yaml":   {},
	".yml":    {},
	".toml":   {},
	".env":    {},
	".ini":    {},
	".cfg":    {},
	".conf":   {},
	".sh":     {},
	".tf":     {},
	".tfvars": {},
}
|
||||
|
||||
// ScanDir recursively scans a directory for secret-like patterns.
|
||||
func ScanDir(root string) ([]Finding, error) {
|
||||
var findings []Finding
|
||||
|
||||
if err := filepath.WalkDir(root, func(path string, d fs.DirEntry, err error) error {
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
name := d.Name()
|
||||
if d.IsDir() {
|
||||
if _, ok := skipDirs[name]; ok || strings.HasPrefix(name, ".") && path != root {
|
||||
return filepath.SkipDir
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
if !isTextCandidate(name) {
|
||||
return nil
|
||||
}
|
||||
|
||||
fileFindings, err := scanFile(path)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
findings = append(findings, fileFindings...)
|
||||
return nil
|
||||
}); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return findings, nil
|
||||
}
|
||||
|
||||
func scanFile(path string) ([]Finding, error) {
|
||||
data, err := fileRead(path)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if len(data) == 0 || bytes.IndexByte(data, 0) >= 0 {
|
||||
return nil, nil
|
||||
}
|
||||
|
||||
var findings []Finding
|
||||
scanner := bufio.NewScanner(bytes.NewReader(data))
|
||||
lineNo := 0
|
||||
for scanner.Scan() {
|
||||
lineNo++
|
||||
line := scanner.Text()
|
||||
matchedSpecific := false
|
||||
for _, rule := range secretRules {
|
||||
if rule.name == "generic-secret-assignment" && matchedSpecific {
|
||||
continue
|
||||
}
|
||||
if loc := rule.match.FindStringIndex(line); loc != nil {
|
||||
findings = append(findings, Finding{
|
||||
Path: path,
|
||||
Line: lineNo,
|
||||
Column: loc[0] + 1,
|
||||
Rule: rule.name,
|
||||
Snippet: strings.TrimSpace(line),
|
||||
})
|
||||
if rule.name != "generic-secret-assignment" {
|
||||
matchedSpecific = true
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
if err := scanner.Err(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return findings, nil
|
||||
}
|
||||
|
||||
func isTextCandidate(name string) bool {
|
||||
if ext := strings.ToLower(filepath.Ext(name)); ext != "" {
|
||||
_, ok := textExts[ext]
|
||||
return ok
|
||||
}
|
||||
// Allow extension-less files such as Makefile, LICENSE, and .env.
|
||||
switch name {
|
||||
case "Makefile", "Dockerfile", "LICENSE", "README", "CLAUDE.md":
|
||||
return true
|
||||
}
|
||||
return strings.HasPrefix(name, ".")
|
||||
}
|
||||
|
||||
// fileRead reads an entire file into memory. It is a package variable rather
// than a direct os.ReadFile call so tests can stub filesystem access.
var fileRead = func(path string) ([]byte, error) {
	return os.ReadFile(path)
}
|
||||
57
devkit/secret_test.go
Normal file
57
devkit/secret_test.go
Normal file
|
|
@ -0,0 +1,57 @@
|
|||
package devkit
|
||||
|
||||
import (
|
||||
"os"
|
||||
"path/filepath"
|
||||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/require"
|
||||
)
|
||||
|
||||
// TestScanDir_Good writes two files containing known secret patterns (a
// GitHub token and an AWS access key) and checks both are reported with the
// expected rule, line number, and file, in lexical walk order.
func TestScanDir_Good(t *testing.T) {
	root := t.TempDir()

	// The leading newline in the raw string puts the api_key assignment on
	// line 2 of config.yml.
	require.NoError(t, os.WriteFile(filepath.Join(root, "config.yml"), []byte(`
api_key: "ghp_abcdefghijklmnopqrstuvwxyz1234"
`), 0o600))

	require.NoError(t, os.Mkdir(filepath.Join(root, "nested"), 0o755))
	require.NoError(t, os.WriteFile(filepath.Join(root, "nested", "creds.txt"), []byte("access_key = AKIA1234567890ABCDEF\n"), 0o600))

	findings, err := ScanDir(root)
	require.NoError(t, err)
	require.Len(t, findings, 2)

	require.Equal(t, "github-token", findings[0].Rule)
	require.Equal(t, 2, findings[0].Line)
	require.Equal(t, "config.yml", filepath.Base(findings[0].Path))

	require.Equal(t, "aws-access-key-id", findings[1].Rule)
	require.Equal(t, 1, findings[1].Line)
	require.Equal(t, "creds.txt", filepath.Base(findings[1].Path))
}
|
||||
|
||||
func TestScanDir_SkipsBinaryAndIgnoredDirs(t *testing.T) {
|
||||
root := t.TempDir()
|
||||
|
||||
require.NoError(t, os.Mkdir(filepath.Join(root, ".git"), 0o755))
|
||||
require.NoError(t, os.WriteFile(filepath.Join(root, ".git", "config"), []byte("token=ghp_abcdefghijklmnopqrstuvwxyz1234"), 0o600))
|
||||
require.NoError(t, os.WriteFile(filepath.Join(root, "blob.bin"), []byte{0, 1, 2, 3, 4}, 0o600))
|
||||
|
||||
findings, err := ScanDir(root)
|
||||
require.NoError(t, err)
|
||||
require.Empty(t, findings)
|
||||
}
|
||||
|
||||
func TestScanDir_ReportsGenericAssignments(t *testing.T) {
|
||||
root := t.TempDir()
|
||||
|
||||
require.NoError(t, os.WriteFile(filepath.Join(root, "secrets.env"), []byte("client_secret: abcdefghijklmnop\n"), 0o600))
|
||||
|
||||
findings, err := ScanDir(root)
|
||||
require.NoError(t, err)
|
||||
require.Len(t, findings, 1)
|
||||
require.Equal(t, "generic-secret-assignment", findings[0].Rule)
|
||||
require.Equal(t, 1, findings[0].Line)
|
||||
require.Equal(t, 1, findings[0].Column)
|
||||
}
|
||||
10
go.mod
10
go.mod
|
|
@ -10,8 +10,8 @@ require (
|
|||
dappco.re/go/core/io v0.1.7
|
||||
dappco.re/go/core/log v0.0.4
|
||||
dappco.re/go/core/scm v0.3.6
|
||||
forge.lthn.ai/core/cli v0.3.7
|
||||
forge.lthn.ai/core/go-container v0.1.7
|
||||
dappco.re/go/core/cli v0.3.7
|
||||
dappco.re/go/core/container v0.1.7
|
||||
github.com/kluctl/go-embed-python v0.0.0-3.13.1-20241219-1
|
||||
github.com/stretchr/testify v1.11.1
|
||||
golang.org/x/term v0.41.0
|
||||
|
|
@ -21,9 +21,9 @@ require (
|
|||
|
||||
require (
|
||||
codeberg.org/mvdkleijn/forgejo-sdk/forgejo/v2 v2.2.0 // indirect
|
||||
forge.lthn.ai/core/config v0.1.8 // indirect
|
||||
forge.lthn.ai/core/go-inference v0.1.6 // indirect
|
||||
forge.lthn.ai/core/go-store v0.1.9 // indirect
|
||||
dappco.re/go/core/config v0.1.8 // indirect
|
||||
dappco.re/go/core/inference v0.1.6 // indirect
|
||||
dappco.re/go/core/store v0.1.9 // indirect
|
||||
github.com/42wim/httpsig v1.2.3 // indirect
|
||||
github.com/aymanbagabas/go-osc52/v2 v2.0.1 // indirect
|
||||
github.com/cespare/xxhash/v2 v2.3.0 // indirect
|
||||
|
|
|
|||
36
go.sum
36
go.sum
|
|
@ -2,14 +2,44 @@ code.gitea.io/sdk/gitea v0.23.2 h1:iJB1FDmLegwfwjX8gotBDHdPSbk/ZR8V9VmEJaVsJYg=
|
|||
code.gitea.io/sdk/gitea v0.23.2/go.mod h1:yyF5+GhljqvA30sRDreoyHILruNiy4ASufugzYg0VHM=
|
||||
codeberg.org/mvdkleijn/forgejo-sdk/forgejo/v2 v2.2.0 h1:HTCWpzyWQOHDWt3LzI6/d2jvUDsw/vgGRWm/8BTvcqI=
|
||||
codeberg.org/mvdkleijn/forgejo-sdk/forgejo/v2 v2.2.0/go.mod h1:ZglEEDj+qkxYUb+SQIeqGtFxQrbaMYqIOgahNKb7uxs=
|
||||
dappco.re/go/agent v0.3.3 h1:hVF+ExuJ/WHuQjEdje6bSUPcUpy6jUscVl9fiuV8l74=
|
||||
dappco.re/go/agent v0.3.3/go.mod h1:UnrGApmKd/GzHEFcgy/tYuSfeJwxRx8UsxPhTjU5Ntw=
|
||||
dappco.re/go/agent v0.9.0 h1:ZfQTyUWa7YXznGLQZG9r7njwWThfLfsdIkOXJWboqZc=
|
||||
dappco.re/go/agent v0.9.0/go.mod h1:UnrGApmKd/GzHEFcgy/tYuSfeJwxRx8UsxPhTjU5Ntw=
|
||||
dappco.re/go/agent v0.10.0-alpha.1 h1:hZEm4lAqjP6wgsxelYETdMUhGTHdIBpH8hJTMO58GPA=
|
||||
dappco.re/go/agent v0.10.0-alpha.1/go.mod h1:jiShGsIfHS7b7rJXMdb30K+wKL8Kx8w/VUrLNDYRbCo=
|
||||
dappco.re/go/core v0.4.7 h1:KmIA/2lo6rl1NMtLrKqCWfMlUqpDZYH3q0/d10dTtGA=
|
||||
dappco.re/go/core v0.4.7/go.mod h1:f2/tBZ3+3IqDrg2F5F598llv0nmb/4gJVCFzM5geE4A=
|
||||
dappco.re/go/core v0.8.0-alpha.1 h1:gj7+Scv+L63Z7wMxbJYHhaRFkHJo2u4MMPuUSv/Dhtk=
|
||||
dappco.re/go/core v0.8.0-alpha.1/go.mod h1:f2/tBZ3+3IqDrg2F5F598llv0nmb/4gJVCFzM5geE4A=
|
||||
dappco.re/go/core/i18n v0.1.7 h1:JhJeptA/I42c7GhmtJDPDlvhO8Y3izQ82wpaXCy/XZ0=
|
||||
dappco.re/go/core/i18n v0.1.7/go.mod h1:0VDjwtY99NSj2iqwrI09h5GUsJeM9s48MLkr+/Dn4G8=
|
||||
dappco.re/go/core/i18n v0.2.1 h1:BeEThqNmQxFoGHY95jSlawq8+RmJBEz4fZ7D7eRQSJo=
|
||||
dappco.re/go/core/i18n v0.2.1/go.mod h1:9eSVJXr3OpIGWQvDynfhqcp27xnLMwlYLgsByU+p7ok=
|
||||
dappco.re/go/core/inference v0.2.0/go.mod h1:YLYk/FxWACGehXpHCTa/t7hFl9uvAoq83QYSBakNNlc=
|
||||
dappco.re/go/core/io v0.1.7 h1:tYyOnNFQcF//mqDLTNjBu4PV/CBizW7hm2ZnwdQQi40=
|
||||
dappco.re/go/core/io v0.1.7/go.mod h1:8lRLFk4Dnp5cR/Cyzh9WclD5566TbpdRgwcH7UZLWn4=
|
||||
dappco.re/go/core/io v0.3.0-alpha.1 h1:xTWrlk72qG0+aIyP5+Telp2nmFF0GG0EBFyVrOiBtec=
|
||||
dappco.re/go/core/io v0.3.0-alpha.1/go.mod h1:1/DWfw8U9ARKQobFJ7KhsNw2lvJGnQr/vi4Pmqxps6s=
|
||||
dappco.re/go/core/log v0.0.4 h1:qy54NYLh9nA4Kvo6XBsuAdyDD5jRc9PVnJLz9R0LiBw=
|
||||
dappco.re/go/core/log v0.0.4/go.mod h1:r14MXKOD3LF/sI8XUJQhRk/SZHBE7jAFVuCfgkXoZPw=
|
||||
dappco.re/go/core/log v0.1.0 h1:pa71Vq2TD2aoEUQWFKwNcaJ3GBY8HbaNGqtE688Unyc=
|
||||
dappco.re/go/core/log v0.1.0/go.mod h1:Nkqb8gsXhZAO8VLpx7B8i1iAmohhzqA20b9Zr8VUcJs=
|
||||
dappco.re/go/core/scm v0.3.6 h1:QUHaaPggP0+zfg7y4Q+BChQaVjx6PW+LKkOzcWYPpZ0=
|
||||
dappco.re/go/core/scm v0.3.6/go.mod h1:IWFIYDfRH0mtRdqY5zV06l/RkmkPpBM6FcbKWhg1Qa8=
|
||||
dappco.re/go/core/scm v0.5.0-alpha.1 h1:/LDH7lhVkogqJMxs3w6qmx87RuoHf3nGBNb5El2YQCg=
|
||||
dappco.re/go/core/scm v0.5.0-alpha.1/go.mod h1:qj/tAPMefuQ9HR5Sb+6qZTuaFNbvTOAhedsXHcal1qU=
|
||||
dappco.re/go/core/store v0.2.0/go.mod h1:QQGJiruayjna3nywbf0N2gcO502q/oEkPoSpBpSKbLM=
|
||||
forge.lthn.ai/core/agent v0.3.3 h1:lGpoD5OgvdJ5z+qofw8fBWkDB186QM7I2jjXEbtzSdA=
|
||||
forge.lthn.ai/core/agent v0.3.3/go.mod h1:UnrGApmKd/GzHEFcgy/tYuSfeJwxRx8UsxPhTjU5Ntw=
|
||||
forge.lthn.ai/core/agent v0.9.0 h1:O43ncyGmEKapB2kjxEzGODqOOMMT5IyZsotXieqmZGo=
|
||||
forge.lthn.ai/core/agent v0.9.0/go.mod h1:UnrGApmKd/GzHEFcgy/tYuSfeJwxRx8UsxPhTjU5Ntw=
|
||||
forge.lthn.ai/core/cli v0.3.7 h1:1GrbaGg0wDGHr6+klSbbGyN/9sSbHvFbdySJznymhwg=
|
||||
forge.lthn.ai/core/cli v0.3.7/go.mod h1:DBUppJkA9P45ZFGgI2B8VXw1rAZxamHoI/KG7fRvTNs=
|
||||
forge.lthn.ai/core/config v0.1.8 h1:xP2hys7T94QGVF/OTh84/Zr5Dm/dL/0vzjht8zi+LOg=
|
||||
forge.lthn.ai/core/config v0.1.8/go.mod h1:8epZrkwoCt+5ayrqdinOUU/+w6UoxOyv9ZrdgVOgYfQ=
|
||||
forge.lthn.ai/core/config v0.2.0-alpha.1 h1:lhxmnESx+iplLV7aqORbdOodQPSGoBk86oIxPyCXjmc=
|
||||
forge.lthn.ai/core/config v0.2.0-alpha.1/go.mod h1:AIm7VlO/h4s1LmGSn0HZb+RqAbhmZFJppVGivcsJmGE=
|
||||
forge.lthn.ai/core/go v0.3.3 h1:kYYZ2nRYy0/Be3cyuLJspRjLqTMxpckVyhb/7Sw2gd0=
|
||||
forge.lthn.ai/core/go v0.3.3/go.mod h1:Cp4ac25pghvO2iqOu59t1GyngTKVOzKB5/VPdhRi9CQ=
|
||||
forge.lthn.ai/core/go-container v0.1.7 h1:+/6NIu7OWyK2LSi2obnFF5fVpWhKiWduMiEkmnbZS6U=
|
||||
|
|
@ -18,12 +48,18 @@ forge.lthn.ai/core/go-i18n v0.1.7 h1:aHkAoc3W8fw3RPNvw/UszQbjyFWXHszzbZgty3SwyAA
|
|||
forge.lthn.ai/core/go-i18n v0.1.7/go.mod h1:0VDjwtY99NSj2iqwrI09h5GUsJeM9s48MLkr+/Dn4G8=
|
||||
forge.lthn.ai/core/go-inference v0.1.6 h1:ce42zC0zO8PuISUyAukAN1NACEdWp5wF1mRgnh5+58E=
|
||||
forge.lthn.ai/core/go-inference v0.1.6/go.mod h1:jfWz+IJX55wAH98+ic6FEqqGB6/P31CHlg7VY7pxREw=
|
||||
forge.lthn.ai/core/go-inference v0.1.7 h1:9Dy6v03jX5ZRH3n5iTzlYyGtucuBIgSe+S7GWvBzx9Q=
|
||||
forge.lthn.ai/core/go-inference v0.1.7/go.mod h1:jfWz+IJX55wAH98+ic6FEqqGB6/P31CHlg7VY7pxREw=
|
||||
forge.lthn.ai/core/go-io v0.1.7 h1:Tdb6sqh+zz1lsGJaNX9RFWM6MJ/RhSAyxfulLXrJsbk=
|
||||
forge.lthn.ai/core/go-io v0.1.7/go.mod h1:8lRLFk4Dnp5cR/Cyzh9WclD5566TbpdRgwcH7UZLWn4=
|
||||
forge.lthn.ai/core/go-log v0.0.4 h1:KTuCEPgFmuM8KJfnyQ8vPOU1Jg654W74h8IJvfQMfv0=
|
||||
forge.lthn.ai/core/go-log v0.0.4/go.mod h1:r14MXKOD3LF/sI8XUJQhRk/SZHBE7jAFVuCfgkXoZPw=
|
||||
forge.lthn.ai/core/go-scm v0.2.0 h1:TvDyCzw0HWzXjmqe6uPc46nPaRzc7MPGswmwZt0CmXo=
|
||||
forge.lthn.ai/core/go-scm v0.2.0/go.mod h1:Q/PV2FbqDlWnAOsXAd1pgSiHOlRCPW4HcPmOt8Z9H+E=
|
||||
forge.lthn.ai/core/go-scm v0.3.6 h1:LFNx8Fs82mrpxro/MPUM6tMiD4DqPmdu83UknXztQjc=
|
||||
forge.lthn.ai/core/go-scm v0.3.6/go.mod h1:IWFIYDfRH0mtRdqY5zV06l/RkmkPpBM6FcbKWhg1Qa8=
|
||||
forge.lthn.ai/core/go-store v0.1.6 h1:7T+K5cciXOaWRxge0WnGkt0PcK3epliWBa1G2FLEuac=
|
||||
forge.lthn.ai/core/go-store v0.1.6/go.mod h1:/2vqaAn+HgGU14N29B+vIfhjIsBzy7RC+AluI6BIUKI=
|
||||
forge.lthn.ai/core/go-store v0.1.9 h1:DGO2sUo2i/csWbhw7zxU7oyGF2FJT72/8w47GhZ1joM=
|
||||
forge.lthn.ai/core/go-store v0.1.9/go.mod h1:VNnHh94TMD3+L+sSgvxn0GHtDKhJR8FD6JiuIuRtjuk=
|
||||
github.com/42wim/httpsig v1.2.3 h1:xb0YyWhkYj57SPtfSttIobJUPJZB9as1nsfo7KWVcEs=
|
||||
|
|
|
|||
|
|
@ -8,7 +8,11 @@
|
|||
"short": "Multi-repo development workflows",
|
||||
"long": "Development workflow commands for managing multiple repositories.\n\nIncludes git operations, forge integration, CI status, and dev environment management.",
|
||||
"api": {
|
||||
"short": "API synchronisation tools"
|
||||
"short": "API synchronisation tools",
|
||||
"test_gen": {
|
||||
"short": "Generate public API test stubs",
|
||||
"long": "Scan internal service packages and generate compile-time tests for their public API wrappers."
|
||||
}
|
||||
},
|
||||
"health": {
|
||||
"short": "Quick health check across all repos",
|
||||
|
|
@ -147,7 +151,7 @@
|
|||
"flag": {
|
||||
"command": "Shell command to run in each repo",
|
||||
"script": "Script file to run in each repo",
|
||||
"repos": "Comma-separated list of repo names to target",
|
||||
"repos": "Comma-separated list of repo names, paths, or glob patterns to target",
|
||||
"commit": "Commit changes after running",
|
||||
"message": "Commit message (required with --commit)",
|
||||
"push": "Push after committing",
|
||||
|
|
@ -169,6 +173,8 @@
|
|||
"long": "Copy a file or directory to matching repos, optionally committing and pushing the changes.\n\nDesigned for safe file distribution by AI agents.",
|
||||
"source": "Source",
|
||||
"targets": "Targets",
|
||||
"warning": "This will copy files into each target repo.",
|
||||
"confirm": "Sync these repos?",
|
||||
"dry_run_mode": "[dry-run] No changes will be made",
|
||||
"no_changes": "no changes",
|
||||
"summary": "Summary",
|
||||
|
|
@ -177,7 +183,8 @@
|
|||
"message": "Commit message (omit to leave uncommitted)",
|
||||
"push": "Push after committing",
|
||||
"co_author": "Co-author for commits",
|
||||
"dry_run": "Preview without making changes"
|
||||
"dry_run": "Preview without making changes",
|
||||
"yes": "Skip confirmation prompt"
|
||||
},
|
||||
"error": {
|
||||
"source_not_found": "Source not found: {{.Path}}",
|
||||
|
|
@ -203,6 +210,7 @@
|
|||
"skipped_count": "{{.Count}} skipped",
|
||||
"synced_count": "{{.Count}} synced",
|
||||
"would_sync_count": "{{.Count}} would sync",
|
||||
"templates": "Templates",
|
||||
"header": {
|
||||
"repo": "Repo"
|
||||
},
|
||||
|
|
@ -396,6 +404,14 @@
|
|||
"select_packages": "Select packages to clone",
|
||||
"confirm_clone": "Clone {{.Count}} package(s) to {{.Target}}?"
|
||||
},
|
||||
"repo": {
|
||||
"short": "Generate .core config for a repo",
|
||||
"long": "Detect the current project type and generate .core/build.yaml, release.yaml, and test.yaml for the repository.",
|
||||
"setting_up": "Setting up repo",
|
||||
"detected_type": "Detected project type",
|
||||
"would_create": "Would create",
|
||||
"created": "Created"
|
||||
},
|
||||
"github": {
|
||||
"short": "Configure GitHub repo settings",
|
||||
"long": "Apply standardised GitHub settings (labels, webhooks, branch protection, security) to repos.",
|
||||
|
|
@ -425,12 +441,6 @@
|
|||
"to_create": "To create",
|
||||
"to_update": "To update",
|
||||
"to_delete": "To delete"
|
||||
},
|
||||
"repo": {
|
||||
"setting_up": "Setting up repo",
|
||||
"detected_type": "Detected project type",
|
||||
"would_create": "Would create",
|
||||
"created": "Created"
|
||||
}
|
||||
}
|
||||
},
|
||||
|
|
|
|||
Loading…
Add table
Reference in a new issue